repo_id stringclasses 875 values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
googleapis/google-cloud-java | 38,176 | java-retail/proto-google-cloud-retail-v2beta/src/main/java/com/google/cloud/retail/v2beta/UpdateAttributesConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2beta/catalog_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2beta;
/**
*
*
* <pre>
* Request for
* [CatalogService.UpdateAttributesConfig][google.cloud.retail.v2beta.CatalogService.UpdateAttributesConfig]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2beta.UpdateAttributesConfigRequest}
*/
public final class UpdateAttributesConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2beta.UpdateAttributesConfigRequest)
UpdateAttributesConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateAttributesConfigRequest.newBuilder() to construct.
private UpdateAttributesConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateAttributesConfigRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateAttributesConfigRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2beta.CatalogServiceProto
.internal_static_google_cloud_retail_v2beta_UpdateAttributesConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2beta.CatalogServiceProto
.internal_static_google_cloud_retail_v2beta_UpdateAttributesConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest.class,
com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest.Builder.class);
}
private int bitField0_;
public static final int ATTRIBUTES_CONFIG_FIELD_NUMBER = 1;
private com.google.cloud.retail.v2beta.AttributesConfig attributesConfig_;
/**
*
*
* <pre>
* Required. The
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.AttributesConfig attributes_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the attributesConfig field is set.
*/
@java.lang.Override
public boolean hasAttributesConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.AttributesConfig attributes_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The attributesConfig.
*/
@java.lang.Override
public com.google.cloud.retail.v2beta.AttributesConfig getAttributesConfig() {
return attributesConfig_ == null
? com.google.cloud.retail.v2beta.AttributesConfig.getDefaultInstance()
: attributesConfig_;
}
/**
*
*
* <pre>
* Required. The
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.AttributesConfig attributes_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.retail.v2beta.AttributesConfigOrBuilder getAttributesConfigOrBuilder() {
return attributesConfig_ == null
? com.google.cloud.retail.v2beta.AttributesConfig.getDefaultInstance()
: attributesConfig_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Indicates which fields in the provided
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* The following is the only supported field:
*
* * [AttributesConfig.catalog_attributes][google.cloud.retail.v2beta.AttributesConfig.catalog_attributes]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* The following is the only supported field:
*
* * [AttributesConfig.catalog_attributes][google.cloud.retail.v2beta.AttributesConfig.catalog_attributes]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* The following is the only supported field:
*
* * [AttributesConfig.catalog_attributes][google.cloud.retail.v2beta.AttributesConfig.catalog_attributes]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAttributesConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getAttributesConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest)) {
return super.equals(obj);
}
com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest other =
(com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest) obj;
if (hasAttributesConfig() != other.hasAttributesConfig()) return false;
if (hasAttributesConfig()) {
if (!getAttributesConfig().equals(other.getAttributesConfig())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAttributesConfig()) {
hash = (37 * hash) + ATTRIBUTES_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getAttributesConfig().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for
* [CatalogService.UpdateAttributesConfig][google.cloud.retail.v2beta.CatalogService.UpdateAttributesConfig]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2beta.UpdateAttributesConfigRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2beta.UpdateAttributesConfigRequest)
com.google.cloud.retail.v2beta.UpdateAttributesConfigRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2beta.CatalogServiceProto
.internal_static_google_cloud_retail_v2beta_UpdateAttributesConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2beta.CatalogServiceProto
.internal_static_google_cloud_retail_v2beta_UpdateAttributesConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest.class,
com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest.Builder.class);
}
// Construct using com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getAttributesConfigFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
attributesConfig_ = null;
if (attributesConfigBuilder_ != null) {
attributesConfigBuilder_.dispose();
attributesConfigBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.retail.v2beta.CatalogServiceProto
.internal_static_google_cloud_retail_v2beta_UpdateAttributesConfigRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest
getDefaultInstanceForType() {
return com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest build() {
com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest buildPartial() {
com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest result =
new com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.attributesConfig_ =
attributesConfigBuilder_ == null ? attributesConfig_ : attributesConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest) {
return mergeFrom((com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest other) {
if (other
== com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest.getDefaultInstance())
return this;
if (other.hasAttributesConfig()) {
mergeAttributesConfig(other.getAttributesConfig());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(
getAttributesConfigFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.retail.v2beta.AttributesConfig attributesConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.retail.v2beta.AttributesConfig,
com.google.cloud.retail.v2beta.AttributesConfig.Builder,
com.google.cloud.retail.v2beta.AttributesConfigOrBuilder>
attributesConfigBuilder_;
/**
*
*
* <pre>
* Required. The
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.AttributesConfig attributes_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the attributesConfig field is set.
*/
public boolean hasAttributesConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.AttributesConfig attributes_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The attributesConfig.
*/
public com.google.cloud.retail.v2beta.AttributesConfig getAttributesConfig() {
if (attributesConfigBuilder_ == null) {
return attributesConfig_ == null
? com.google.cloud.retail.v2beta.AttributesConfig.getDefaultInstance()
: attributesConfig_;
} else {
return attributesConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.AttributesConfig attributes_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setAttributesConfig(com.google.cloud.retail.v2beta.AttributesConfig value) {
if (attributesConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
attributesConfig_ = value;
} else {
attributesConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.AttributesConfig attributes_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setAttributesConfig(
com.google.cloud.retail.v2beta.AttributesConfig.Builder builderForValue) {
if (attributesConfigBuilder_ == null) {
attributesConfig_ = builderForValue.build();
} else {
attributesConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.AttributesConfig attributes_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeAttributesConfig(com.google.cloud.retail.v2beta.AttributesConfig value) {
if (attributesConfigBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& attributesConfig_ != null
&& attributesConfig_
!= com.google.cloud.retail.v2beta.AttributesConfig.getDefaultInstance()) {
getAttributesConfigBuilder().mergeFrom(value);
} else {
attributesConfig_ = value;
}
} else {
attributesConfigBuilder_.mergeFrom(value);
}
if (attributesConfig_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.AttributesConfig attributes_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearAttributesConfig() {
bitField0_ = (bitField0_ & ~0x00000001);
attributesConfig_ = null;
if (attributesConfigBuilder_ != null) {
attributesConfigBuilder_.dispose();
attributesConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.AttributesConfig attributes_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.retail.v2beta.AttributesConfig.Builder getAttributesConfigBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAttributesConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.AttributesConfig attributes_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.retail.v2beta.AttributesConfigOrBuilder getAttributesConfigOrBuilder() {
if (attributesConfigBuilder_ != null) {
return attributesConfigBuilder_.getMessageOrBuilder();
} else {
return attributesConfig_ == null
? com.google.cloud.retail.v2beta.AttributesConfig.getDefaultInstance()
: attributesConfig_;
}
}
/**
*
*
* <pre>
* Required. The
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* </pre>
*
* <code>
* .google.cloud.retail.v2beta.AttributesConfig attributes_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.retail.v2beta.AttributesConfig,
com.google.cloud.retail.v2beta.AttributesConfig.Builder,
com.google.cloud.retail.v2beta.AttributesConfigOrBuilder>
getAttributesConfigFieldBuilder() {
if (attributesConfigBuilder_ == null) {
attributesConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.retail.v2beta.AttributesConfig,
com.google.cloud.retail.v2beta.AttributesConfig.Builder,
com.google.cloud.retail.v2beta.AttributesConfigOrBuilder>(
getAttributesConfig(), getParentForChildren(), isClean());
attributesConfig_ = null;
}
return attributesConfigBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Indicates which fields in the provided
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* The following is the only supported field:
*
* * [AttributesConfig.catalog_attributes][google.cloud.retail.v2beta.AttributesConfig.catalog_attributes]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* The following is the only supported field:
*
* * [AttributesConfig.catalog_attributes][google.cloud.retail.v2beta.AttributesConfig.catalog_attributes]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* The following is the only supported field:
*
* * [AttributesConfig.catalog_attributes][google.cloud.retail.v2beta.AttributesConfig.catalog_attributes]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* The following is the only supported field:
*
* * [AttributesConfig.catalog_attributes][google.cloud.retail.v2beta.AttributesConfig.catalog_attributes]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* The following is the only supported field:
*
* * [AttributesConfig.catalog_attributes][google.cloud.retail.v2beta.AttributesConfig.catalog_attributes]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* The following is the only supported field:
*
* * [AttributesConfig.catalog_attributes][google.cloud.retail.v2beta.AttributesConfig.catalog_attributes]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* The following is the only supported field:
*
* * [AttributesConfig.catalog_attributes][google.cloud.retail.v2beta.AttributesConfig.catalog_attributes]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* The following is the only supported field:
*
* * [AttributesConfig.catalog_attributes][google.cloud.retail.v2beta.AttributesConfig.catalog_attributes]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Indicates which fields in the provided
* [AttributesConfig][google.cloud.retail.v2beta.AttributesConfig] to update.
* The following is the only supported field:
*
* * [AttributesConfig.catalog_attributes][google.cloud.retail.v2beta.AttributesConfig.catalog_attributes]
*
* If not set, all supported fields are updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2beta.UpdateAttributesConfigRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2beta.UpdateAttributesConfigRequest)
private static final com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest();
}
public static com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Parser singleton used by the protobuf runtime to decode wire-format bytes
  // into UpdateAttributesConfigRequest instances.
  private static final com.google.protobuf.Parser<UpdateAttributesConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateAttributesConfigRequest>() {
        @java.lang.Override
        public UpdateAttributesConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed before the failure so callers can
            // inspect the partial message.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type expected
            // by Parser callers, again preserving the partial message.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns the parser singleton for this message type. */
  public static com.google.protobuf.Parser<UpdateAttributesConfigRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateAttributesConfigRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.retail.v2beta.UpdateAttributesConfigRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---- concatenation artifact: a repo-metadata row (repo id | size | path) was fused
// between two generated files. Next file:
// clients/google-api-services-discoveryengine/v1beta/2.0.0/com/google/api/services/discoveryengine/v1beta/model/GoogleCloudDiscoveryengineV1betaUserEvent.java
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.discoveryengine.v1beta.model;
/**
* UserEvent captures all metadata information Discovery Engine API needs to know about how end
* users interact with your website.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Discovery Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class GoogleCloudDiscoveryengineV1betaUserEvent extends com.google.api.client.json.GenericJson {
  /**
   * Extra user event features to include in the recommendation model. These attributes must NOT
   * contain data that needs to be parsed or processed further, e.g. JSON or other encodings. If you
   * provide custom attributes for ingested user events, also include them in the user events that
   * you associate with prediction requests. Custom attribute formatting must be consistent between
   * imported events and events provided with prediction requests. This lets the Discovery Engine
   * API use those custom attributes when training models and serving predictions, which helps
   * improve recommendation quality. This field needs to pass all below criteria, otherwise an
   * `INVALID_ARGUMENT` error is returned: * The key must be a UTF-8 encoded string with a length
   * limit of 5,000 characters. * For text attributes, at most 400 values are allowed. Empty values
   * are not allowed. Each value must be a UTF-8 encoded string with a length limit of 256
   * characters. * For number attributes, at most 400 values are allowed. For product
   * recommendations, an example of extra user information is `traffic_channel`, which is how a user
   * arrives at the site. Users can arrive at the site by coming to the site directly, coming
   * through Google search, or in other ways.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, GoogleCloudDiscoveryengineV1betaCustomAttribute> attributes;

  static {
    // hack to force ProGuard to consider GoogleCloudDiscoveryengineV1betaCustomAttribute used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(GoogleCloudDiscoveryengineV1betaCustomAttribute.class);
  }

  /**
   * Token to attribute an API response to user action(s) to trigger the event. Highly recommended
   * for user events that are the result of RecommendationService.Recommend. This field enables
   * accurate attribution of recommendation model performance. The value must be one of: *
   * RecommendResponse.attribution_token for events that are the result of
   * RecommendationService.Recommend. * SearchResponse.attribution_token for events that are the
   * result of SearchService.Search. This token enables us to accurately attribute page view or
   * conversion completion back to the event and the particular predict response containing this
   * clicked/purchased product. If user clicks on product K in the recommendation results, pass
   * RecommendResponse.attribution_token as a URL parameter to product K's page. When recording
   * events on product K's page, log the RecommendResponse.attribution_token to this field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String attributionToken;

  /**
   * CompletionService.CompleteQuery details related to the event. This field should be set for
   * `search` event when autocomplete function is enabled and the user clicks a suggestion for
   * search.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1betaCompletionInfo completionInfo;

  /**
   * Optional. Conversion type. Required if UserEvent.event_type is `conversion`. This is a
   * customer-defined conversion name in lowercase letters or numbers separated by "-", such as
   * "watch", "good-visit" etc. Do not set the field if UserEvent.event_type is not `conversion`.
   * This mixes the custom conversion event with predefined events like `search`, `view-item` etc.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String conversionType;

  /**
   * The DataStore resource full name, of the form `projects/{project}/locations/{location}/collecti
   * ons/{collection_id}/dataStores/{data_store_id}`. Optional. Only required for user events whose
   * data store can't be determined by UserEvent.engine or UserEvent.documents. If data store is set
   * in the parent of write/import/collect user event requests, this field can be omitted.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String dataStore;

  /**
   * Should set to true if the request is made directly from the end user, in which case the
   * UserEvent.user_info.user_agent can be populated from the HTTP request. This flag should be set
   * only if the API request is made directly from the end user such as a mobile app (and not if a
   * gateway or a server is processing and pushing the user events). This should not be set when
   * using the JavaScript tag in UserEventService.CollectUserEvent.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean directUserRequest;

  /**
   * List of Documents associated with this user event. This field is optional except for the
   * following event types: * `view-item` * `add-to-cart` * `purchase` * `media-play` * `media-
   * complete` In a `search` event, this field represents the documents returned to the end user on
   * the current page (the end user may have not finished browsing the whole page yet). When a new
   * page is returned to the end user, after pagination/filtering/ordering even for the same query,
   * a new `search` event with different UserEvent.documents is desired.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<GoogleCloudDiscoveryengineV1betaDocumentInfo> documents;

  static {
    // hack to force ProGuard to consider GoogleCloudDiscoveryengineV1betaDocumentInfo used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(GoogleCloudDiscoveryengineV1betaDocumentInfo.class);
  }
  /**
   * The Engine resource name, in the form of
   * `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
   * Optional. Only required for Engine produced user events. For example, user events from blended
   * search.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String engine;

  /**
   * Only required for UserEventService.ImportUserEvents method. Timestamp of when the user event
   * happened.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String eventTime;
  // NOTE(review): declared as raw String rather than java.lang.String or a
  // DateTime type — emitted this way by the generator; presumably an RFC 3339
  // timestamp string, confirm against the Discovery Engine API reference.

  /**
   * Required. User event type. Allowed values are: Generic values: * `search`: Search for
   * Documents. * `view-item`: Detailed page view of a Document. * `view-item-list`: View of a panel
   * or ordered list of Documents. * `view-home-page`: View of the home page. * `view-category-
   * page`: View of a category page, e.g. Home > Men > Jeans Retail-related values: * `add-to-cart`:
   * Add an item(s) to cart, e.g. in Retail online shopping * `purchase`: Purchase an item(s) Media-
   * related values: * `media-play`: Start/resume watching a video, playing a song, etc. * `media-
   * complete`: Finished or stopped midway through a video, song, etc. Custom conversion value: *
   * `conversion`: Customer defined conversion event.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String eventType;

  /**
   * The filter syntax consists of an expression language for constructing a predicate from one or
   * more fields of the documents being filtered. One example is for `search` events, the associated
   * SearchRequest may contain a filter expression in SearchRequest.filter conforming to
   * https://google.aip.dev/160#filtering. Similarly, for `view-item-list` events that are generated
   * from a RecommendRequest, this field may be populated directly from RecommendRequest.filter
   * conforming to https://google.aip.dev/160#filtering. The value must be a UTF-8 encoded string
   * with a length limit of 1,000 characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String filter;

  /**
   * Media-specific info.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1betaMediaInfo mediaInfo;

  /**
   * Page metadata such as categories and other critical information for certain event types such as
   * `view-category-page`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1betaPageInfo pageInfo;

  /**
   * Panel metadata associated with this user event.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1betaPanelInfo panel;

  /**
   * Optional. List of panels associated with this event. Used for page-level impression data.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<GoogleCloudDiscoveryengineV1betaPanelInfo> panels;

  static {
    // hack to force ProGuard to consider GoogleCloudDiscoveryengineV1betaPanelInfo used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(GoogleCloudDiscoveryengineV1betaPanelInfo.class);
  }

  /**
   * The promotion IDs if this is an event associated with promotions. Currently, this field is
   * restricted to at most one ID.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> promotionIds;

  /**
   * SearchService.Search details related to the event. This field should be set for `search` event.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1betaSearchInfo searchInfo;

  /**
   * A unique identifier for tracking a visitor session with a length limit of 128 bytes. A session
   * is an aggregation of an end user behavior in a time span. A general guideline to populate the
   * session_id: 1. If user has no activity for 30 min, a new session_id should be assigned. 2. The
   * session_id should be unique across users, suggest use uuid or add UserEvent.user_pseudo_id as
   * prefix.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sessionId;

  /**
   * A list of identifiers for the independent experiment groups this user event belongs to. This is
   * used to distinguish between user events associated with different experiment setups.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> tagIds;

  /**
   * The transaction metadata (if any) associated with this user event.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1betaTransactionInfo transactionInfo;

  /**
   * Information about the end user.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1betaUserInfo userInfo;

  /**
   * Required. A unique identifier for tracking visitors. For example, this could be implemented
   * with an HTTP cookie, which should be able to uniquely identify a visitor on a single device.
   * This unique identifier should not change if the visitor log in/out of the website. Do not set
   * the field to the same fixed ID for different users. This mixes the event history of those users
   * together, which results in degraded model quality. The field must be a UTF-8 encoded string
   * with a length limit of 128 characters. Otherwise, an `INVALID_ARGUMENT` error is returned. The
   * field should not contain PII or user-data. We recommend to use Google Analytics [Client
   * ID](https://developers.google.com/analytics/devguides/collection/analyticsjs/field-
   * reference#clientId) for this field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String userPseudoId;
  /**
   * Extra user event features to include in the recommendation model. These attributes must NOT
   * contain data that needs to be parsed or processed further, e.g. JSON or other encodings. If you
   * provide custom attributes for ingested user events, also include them in the user events that
   * you associate with prediction requests. Custom attribute formatting must be consistent between
   * imported events and events provided with prediction requests. This lets the Discovery Engine
   * API use those custom attributes when training models and serving predictions, which helps
   * improve recommendation quality. This field needs to pass all below criteria, otherwise an
   * `INVALID_ARGUMENT` error is returned: * The key must be a UTF-8 encoded string with a length
   * limit of 5,000 characters. * For text attributes, at most 400 values are allowed. Empty values
   * are not allowed. Each value must be a UTF-8 encoded string with a length limit of 256
   * characters. * For number attributes, at most 400 values are allowed. For product
   * recommendations, an example of extra user information is `traffic_channel`, which is how a user
   * arrives at the site. Users can arrive at the site by coming to the site directly, coming
   * through Google search, or in other ways.
   * @return value or {@code null} for none
   */
  public java.util.Map<String, GoogleCloudDiscoveryengineV1betaCustomAttribute> getAttributes() {
    return attributes;
  }

  /**
   * Extra user event features to include in the recommendation model. These attributes must NOT
   * contain data that needs to be parsed or processed further, e.g. JSON or other encodings. If you
   * provide custom attributes for ingested user events, also include them in the user events that
   * you associate with prediction requests. Custom attribute formatting must be consistent between
   * imported events and events provided with prediction requests. This lets the Discovery Engine
   * API use those custom attributes when training models and serving predictions, which helps
   * improve recommendation quality. This field needs to pass all below criteria, otherwise an
   * `INVALID_ARGUMENT` error is returned: * The key must be a UTF-8 encoded string with a length
   * limit of 5,000 characters. * For text attributes, at most 400 values are allowed. Empty values
   * are not allowed. Each value must be a UTF-8 encoded string with a length limit of 256
   * characters. * For number attributes, at most 400 values are allowed. For product
   * recommendations, an example of extra user information is `traffic_channel`, which is how a user
   * arrives at the site. Users can arrive at the site by coming to the site directly, coming
   * through Google search, or in other ways.
   * @param attributes attributes or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for setter chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setAttributes(java.util.Map<String, GoogleCloudDiscoveryengineV1betaCustomAttribute> attributes) {
    this.attributes = attributes;
    return this;
  }

  /**
   * Token to attribute an API response to user action(s) to trigger the event. Highly recommended
   * for user events that are the result of RecommendationService.Recommend. This field enables
   * accurate attribution of recommendation model performance. The value must be one of: *
   * RecommendResponse.attribution_token for events that are the result of
   * RecommendationService.Recommend. * SearchResponse.attribution_token for events that are the
   * result of SearchService.Search. This token enables us to accurately attribute page view or
   * conversion completion back to the event and the particular predict response containing this
   * clicked/purchased product. If user clicks on product K in the recommendation results, pass
   * RecommendResponse.attribution_token as a URL parameter to product K's page. When recording
   * events on product K's page, log the RecommendResponse.attribution_token to this field.
   * @return value or {@code null} for none
   */
  public java.lang.String getAttributionToken() {
    return attributionToken;
  }

  /**
   * Token to attribute an API response to user action(s) to trigger the event. Highly recommended
   * for user events that are the result of RecommendationService.Recommend. This field enables
   * accurate attribution of recommendation model performance. The value must be one of: *
   * RecommendResponse.attribution_token for events that are the result of
   * RecommendationService.Recommend. * SearchResponse.attribution_token for events that are the
   * result of SearchService.Search. This token enables us to accurately attribute page view or
   * conversion completion back to the event and the particular predict response containing this
   * clicked/purchased product. If user clicks on product K in the recommendation results, pass
   * RecommendResponse.attribution_token as a URL parameter to product K's page. When recording
   * events on product K's page, log the RecommendResponse.attribution_token to this field.
   * @param attributionToken attributionToken or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for setter chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setAttributionToken(java.lang.String attributionToken) {
    this.attributionToken = attributionToken;
    return this;
  }

  /**
   * CompletionService.CompleteQuery details related to the event. This field should be set for
   * `search` event when autocomplete function is enabled and the user clicks a suggestion for
   * search.
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1betaCompletionInfo getCompletionInfo() {
    return completionInfo;
  }

  /**
   * CompletionService.CompleteQuery details related to the event. This field should be set for
   * `search` event when autocomplete function is enabled and the user clicks a suggestion for
   * search.
   * @param completionInfo completionInfo or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for setter chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setCompletionInfo(GoogleCloudDiscoveryengineV1betaCompletionInfo completionInfo) {
    this.completionInfo = completionInfo;
    return this;
  }

  /**
   * Optional. Conversion type. Required if UserEvent.event_type is `conversion`. This is a
   * customer-defined conversion name in lowercase letters or numbers separated by "-", such as
   * "watch", "good-visit" etc. Do not set the field if UserEvent.event_type is not `conversion`.
   * This mixes the custom conversion event with predefined events like `search`, `view-item` etc.
   * @return value or {@code null} for none
   */
  public java.lang.String getConversionType() {
    return conversionType;
  }

  /**
   * Optional. Conversion type. Required if UserEvent.event_type is `conversion`. This is a
   * customer-defined conversion name in lowercase letters or numbers separated by "-", such as
   * "watch", "good-visit" etc. Do not set the field if UserEvent.event_type is not `conversion`.
   * This mixes the custom conversion event with predefined events like `search`, `view-item` etc.
   * @param conversionType conversionType or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for setter chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setConversionType(java.lang.String conversionType) {
    this.conversionType = conversionType;
    return this;
  }

  /**
   * The DataStore resource full name, of the form `projects/{project}/locations/{location}/collecti
   * ons/{collection_id}/dataStores/{data_store_id}`. Optional. Only required for user events whose
   * data store can't be determined by UserEvent.engine or UserEvent.documents. If data store is set
   * in the parent of write/import/collect user event requests, this field can be omitted.
   * @return value or {@code null} for none
   */
  public java.lang.String getDataStore() {
    return dataStore;
  }

  /**
   * The DataStore resource full name, of the form `projects/{project}/locations/{location}/collecti
   * ons/{collection_id}/dataStores/{data_store_id}`. Optional. Only required for user events whose
   * data store can't be determined by UserEvent.engine or UserEvent.documents. If data store is set
   * in the parent of write/import/collect user event requests, this field can be omitted.
   * @param dataStore dataStore or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for setter chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setDataStore(java.lang.String dataStore) {
    this.dataStore = dataStore;
    return this;
  }
  /**
   * Should set to true if the request is made directly from the end user, in which case the
   * UserEvent.user_info.user_agent can be populated from the HTTP request. This flag should be set
   * only if the API request is made directly from the end user such as a mobile app (and not if a
   * gateway or a server is processing and pushing the user events). This should not be set when
   * using the JavaScript tag in UserEventService.CollectUserEvent.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getDirectUserRequest() {
    return directUserRequest;
  }

  /**
   * Should set to true if the request is made directly from the end user, in which case the
   * UserEvent.user_info.user_agent can be populated from the HTTP request. This flag should be set
   * only if the API request is made directly from the end user such as a mobile app (and not if a
   * gateway or a server is processing and pushing the user events). This should not be set when
   * using the JavaScript tag in UserEventService.CollectUserEvent.
   * @param directUserRequest directUserRequest or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for setter chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setDirectUserRequest(java.lang.Boolean directUserRequest) {
    this.directUserRequest = directUserRequest;
    return this;
  }

  /**
   * List of Documents associated with this user event. This field is optional except for the
   * following event types: * `view-item` * `add-to-cart` * `purchase` * `media-play` * `media-
   * complete` In a `search` event, this field represents the documents returned to the end user on
   * the current page (the end user may have not finished browsing the whole page yet). When a new
   * page is returned to the end user, after pagination/filtering/ordering even for the same query,
   * a new `search` event with different UserEvent.documents is desired.
   * @return value or {@code null} for none
   */
  public java.util.List<GoogleCloudDiscoveryengineV1betaDocumentInfo> getDocuments() {
    return documents;
  }

  /**
   * List of Documents associated with this user event. This field is optional except for the
   * following event types: * `view-item` * `add-to-cart` * `purchase` * `media-play` * `media-
   * complete` In a `search` event, this field represents the documents returned to the end user on
   * the current page (the end user may have not finished browsing the whole page yet). When a new
   * page is returned to the end user, after pagination/filtering/ordering even for the same query,
   * a new `search` event with different UserEvent.documents is desired.
   * @param documents documents or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for setter chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setDocuments(java.util.List<GoogleCloudDiscoveryengineV1betaDocumentInfo> documents) {
    this.documents = documents;
    return this;
  }

  /**
   * The Engine resource name, in the form of
   * `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
   * Optional. Only required for Engine produced user events. For example, user events from blended
   * search.
   * @return value or {@code null} for none
   */
  public java.lang.String getEngine() {
    return engine;
  }

  /**
   * The Engine resource name, in the form of
   * `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
   * Optional. Only required for Engine produced user events. For example, user events from blended
   * search.
   * @param engine engine or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for setter chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setEngine(java.lang.String engine) {
    this.engine = engine;
    return this;
  }

  /**
   * Only required for UserEventService.ImportUserEvents method. Timestamp of when the user event
   * happened.
   * @return value or {@code null} for none
   */
  public String getEventTime() {
    return eventTime;
  }

  /**
   * Only required for UserEventService.ImportUserEvents method. Timestamp of when the user event
   * happened.
   * @param eventTime eventTime or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for setter chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setEventTime(String eventTime) {
    this.eventTime = eventTime;
    return this;
  }

  /**
   * Required. User event type. Allowed values are: Generic values: * `search`: Search for
   * Documents. * `view-item`: Detailed page view of a Document. * `view-item-list`: View of a panel
   * or ordered list of Documents. * `view-home-page`: View of the home page. * `view-category-
   * page`: View of a category page, e.g. Home > Men > Jeans Retail-related values: * `add-to-cart`:
   * Add an item(s) to cart, e.g. in Retail online shopping * `purchase`: Purchase an item(s) Media-
   * related values: * `media-play`: Start/resume watching a video, playing a song, etc. * `media-
   * complete`: Finished or stopped midway through a video, song, etc. Custom conversion value: *
   * `conversion`: Customer defined conversion event.
   * @return value or {@code null} for none
   */
  public java.lang.String getEventType() {
    return eventType;
  }

  /**
   * Required. User event type. Allowed values are: Generic values: * `search`: Search for
   * Documents. * `view-item`: Detailed page view of a Document. * `view-item-list`: View of a panel
   * or ordered list of Documents. * `view-home-page`: View of the home page. * `view-category-
   * page`: View of a category page, e.g. Home > Men > Jeans Retail-related values: * `add-to-cart`:
   * Add an item(s) to cart, e.g. in Retail online shopping * `purchase`: Purchase an item(s) Media-
   * related values: * `media-play`: Start/resume watching a video, playing a song, etc. * `media-
   * complete`: Finished or stopped midway through a video, song, etc. Custom conversion value: *
   * `conversion`: Customer defined conversion event.
   * @param eventType eventType or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for setter chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setEventType(java.lang.String eventType) {
    this.eventType = eventType;
    return this;
  }
/**
* The filter syntax consists of an expression language for constructing a predicate from one or
* more fields of the documents being filtered. One example is for `search` events, the associated
* SearchRequest may contain a filter expression in SearchRequest.filter conforming to
* https://google.aip.dev/160#filtering. Similarly, for `view-item-list` events that are generated
* from a RecommendRequest, this field may be populated directly from RecommendRequest.filter
* conforming to https://google.aip.dev/160#filtering. The value must be a UTF-8 encoded string
* with a length limit of 1,000 characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
* @return value or {@code null} for none
*/
public java.lang.String getFilter() {
return filter;
}
/**
* The filter syntax consists of an expression language for constructing a predicate from one or
* more fields of the documents being filtered. One example is for `search` events, the associated
* SearchRequest may contain a filter expression in SearchRequest.filter conforming to
* https://google.aip.dev/160#filtering. Similarly, for `view-item-list` events that are generated
* from a RecommendRequest, this field may be populated directly from RecommendRequest.filter
* conforming to https://google.aip.dev/160#filtering. The value must be a UTF-8 encoded string
* with a length limit of 1,000 characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
* @param filter filter or {@code null} for none
*/
public GoogleCloudDiscoveryengineV1betaUserEvent setFilter(java.lang.String filter) {
this.filter = filter;
return this;
}
/**
* Media-specific info.
* @return value or {@code null} for none
*/
public GoogleCloudDiscoveryengineV1betaMediaInfo getMediaInfo() {
return mediaInfo;
}
/**
* Media-specific info.
* @param mediaInfo mediaInfo or {@code null} for none
*/
public GoogleCloudDiscoveryengineV1betaUserEvent setMediaInfo(GoogleCloudDiscoveryengineV1betaMediaInfo mediaInfo) {
this.mediaInfo = mediaInfo;
return this;
}
/**
* Page metadata such as categories and other critical information for certain event types such as
* `view-category-page`.
* @return value or {@code null} for none
*/
public GoogleCloudDiscoveryengineV1betaPageInfo getPageInfo() {
return pageInfo;
}
/**
* Page metadata such as categories and other critical information for certain event types such as
* `view-category-page`.
* @param pageInfo pageInfo or {@code null} for none
*/
public GoogleCloudDiscoveryengineV1betaUserEvent setPageInfo(GoogleCloudDiscoveryengineV1betaPageInfo pageInfo) {
this.pageInfo = pageInfo;
return this;
}
/**
* Panel metadata associated with this user event.
* @return value or {@code null} for none
*/
public GoogleCloudDiscoveryengineV1betaPanelInfo getPanel() {
return panel;
}
/**
* Panel metadata associated with this user event.
* @param panel panel or {@code null} for none
*/
public GoogleCloudDiscoveryengineV1betaUserEvent setPanel(GoogleCloudDiscoveryengineV1betaPanelInfo panel) {
this.panel = panel;
return this;
}
/**
* Optional. List of panels associated with this event. Used for page-level impression data.
* @return value or {@code null} for none
*/
public java.util.List<GoogleCloudDiscoveryengineV1betaPanelInfo> getPanels() {
return panels;
}
/**
* Optional. List of panels associated with this event. Used for page-level impression data.
* @param panels panels or {@code null} for none
*/
public GoogleCloudDiscoveryengineV1betaUserEvent setPanels(java.util.List<GoogleCloudDiscoveryengineV1betaPanelInfo> panels) {
this.panels = panels;
return this;
}
/**
* The promotion IDs if this is an event associated with promotions. Currently, this field is
* restricted to at most one ID.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getPromotionIds() {
return promotionIds;
}
  /**
   * The promotion IDs if this is an event associated with promotions. Currently, this field is
   * restricted to at most one ID.
   * <p>The list is stored by reference; the at-most-one-ID restriction is not validated here.
   * @param promotionIds promotionIds or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for call chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setPromotionIds(java.util.List<java.lang.String> promotionIds) {
    this.promotionIds = promotionIds;
    return this;
  }
  /**
   * SearchService.Search details related to the event. This field should be set for `search` event.
   * <p>Returns the stored reference directly (no defensive copy).
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1betaSearchInfo getSearchInfo() {
    return searchInfo;
  }
  /**
   * SearchService.Search details related to the event. This field should be set for `search` event.
   * @param searchInfo searchInfo or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for call chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setSearchInfo(GoogleCloudDiscoveryengineV1betaSearchInfo searchInfo) {
    this.searchInfo = searchInfo;
    return this;
  }
  /**
   * A unique identifier for tracking a visitor session with a length limit of 128 bytes. A session
   * is an aggregation of an end user behavior in a time span. A general guideline to populate the
   * session_id: 1. If user has no activity for 30 min, a new session_id should be assigned. 2. The
   * session_id should be unique across users, suggest use uuid or add UserEvent.user_pseudo_id as
   * prefix.
   * <p>Returns whatever was last set; the length limit is not validated locally.
   * @return value or {@code null} for none
   */
  public java.lang.String getSessionId() {
    return sessionId;
  }
  /**
   * A unique identifier for tracking a visitor session with a length limit of 128 bytes. A session
   * is an aggregation of an end user behavior in a time span. A general guideline to populate the
   * session_id: 1. If user has no activity for 30 min, a new session_id should be assigned. 2. The
   * session_id should be unique across users, suggest use uuid or add UserEvent.user_pseudo_id as
   * prefix.
   * <p>The value is stored as given; the length limit is not validated locally.
   * @param sessionId sessionId or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for call chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setSessionId(java.lang.String sessionId) {
    this.sessionId = sessionId;
    return this;
  }
  /**
   * A list of identifiers for the independent experiment groups this user event belongs to. This is
   * used to distinguish between user events associated with different experiment setups.
   * <p>Returns the stored list reference directly (no defensive copy).
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getTagIds() {
    return tagIds;
  }
  /**
   * A list of identifiers for the independent experiment groups this user event belongs to. This is
   * used to distinguish between user events associated with different experiment setups.
   * <p>The list is stored by reference, not copied.
   * @param tagIds tagIds or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for call chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setTagIds(java.util.List<java.lang.String> tagIds) {
    this.tagIds = tagIds;
    return this;
  }
  /**
   * The transaction metadata (if any) associated with this user event.
   * <p>Returns the stored reference directly (no defensive copy).
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1betaTransactionInfo getTransactionInfo() {
    return transactionInfo;
  }
  /**
   * The transaction metadata (if any) associated with this user event.
   * @param transactionInfo transactionInfo or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for call chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setTransactionInfo(GoogleCloudDiscoveryengineV1betaTransactionInfo transactionInfo) {
    this.transactionInfo = transactionInfo;
    return this;
  }
  /**
   * Information about the end user.
   * <p>Returns the stored reference directly (no defensive copy).
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1betaUserInfo getUserInfo() {
    return userInfo;
  }
  /**
   * Information about the end user.
   * @param userInfo userInfo or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for call chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setUserInfo(GoogleCloudDiscoveryengineV1betaUserInfo userInfo) {
    this.userInfo = userInfo;
    return this;
  }
  /**
   * Required. A unique identifier for tracking visitors. For example, this could be implemented
   * with an HTTP cookie, which should be able to uniquely identify a visitor on a single device.
   * This unique identifier should not change if the visitor log in/out of the website. Do not set
   * the field to the same fixed ID for different users. This mixes the event history of those users
   * together, which results in degraded model quality. The field must be a UTF-8 encoded string
   * with a length limit of 128 characters. Otherwise, an `INVALID_ARGUMENT` error is returned. The
   * field should not contain PII or user-data. We recommend to use Google Analytics [Client
   * ID](https://developers.google.com/analytics/devguides/collection/analyticsjs/field-
   * reference#clientId) for this field.
   * <p>Returns whatever was last set; constraints above are enforced server-side, not locally.
   * @return value or {@code null} for none
   */
  public java.lang.String getUserPseudoId() {
    return userPseudoId;
  }
  /**
   * Required. A unique identifier for tracking visitors. For example, this could be implemented
   * with an HTTP cookie, which should be able to uniquely identify a visitor on a single device.
   * This unique identifier should not change if the visitor log in/out of the website. Do not set
   * the field to the same fixed ID for different users. This mixes the event history of those users
   * together, which results in degraded model quality. The field must be a UTF-8 encoded string
   * with a length limit of 128 characters. Otherwise, an `INVALID_ARGUMENT` error is returned. The
   * field should not contain PII or user-data. We recommend to use Google Analytics [Client
   * ID](https://developers.google.com/analytics/devguides/collection/analyticsjs/field-
   * reference#clientId) for this field.
   * <p>The value is stored as given; constraints above are enforced server-side, not locally.
   * @param userPseudoId userPseudoId or {@code null} for none
   * @return this {@code GoogleCloudDiscoveryengineV1betaUserEvent}, for call chaining
   */
  public GoogleCloudDiscoveryengineV1betaUserEvent setUserPseudoId(java.lang.String userPseudoId) {
    this.userPseudoId = userPseudoId;
    return this;
  }
  /** Generic by-name field setter; delegates to {@code super.set} and narrows the return type for chaining. */
  @Override
  public GoogleCloudDiscoveryengineV1betaUserEvent set(String fieldName, Object value) {
    return (GoogleCloudDiscoveryengineV1betaUserEvent) super.set(fieldName, value);
  }
  /** @return the result of {@code super.clone()}, cast to this type for caller convenience */
  @Override
  public GoogleCloudDiscoveryengineV1betaUserEvent clone() {
    return (GoogleCloudDiscoveryengineV1betaUserEvent) super.clone();
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.data.management.copy.iceberg;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableMetadata;
import org.apache.iceberg.TableOperations;
import org.apache.iceberg.catalog.TableIdentifier;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.testng.collections.Sets;
import com.google.api.client.util.Maps;
import com.google.common.collect.Lists;
import com.google.common.collect.Streams;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import lombok.Data;
import org.apache.gobblin.data.management.copy.CopyConfiguration;
import org.apache.gobblin.data.management.copy.CopyContext;
import org.apache.gobblin.data.management.copy.CopyEntity;
import org.apache.gobblin.data.management.copy.PreserveAttributes;
import org.apache.gobblin.dataset.DatasetConstants;
import org.apache.gobblin.dataset.DatasetDescriptor;
import static org.mockito.Mockito.any;
/** Tests for {@link org.apache.gobblin.data.management.copy.iceberg.IcebergDataset} */
public class IcebergDatasetTest {
  // Distinct source/dest filesystem URIs so mocks can be told apart.
  private static final URI SRC_FS_URI;
  private static final URI DEST_FS_URI;
  static {
    try {
      SRC_FS_URI = new URI("abc", "the.source.org", "/", null);
      DEST_FS_URI = new URI("xyz", "the.dest.org", "/", null);
    } catch (URISyntaxException e) {
      throw new RuntimeException("should not occur!", e);
    }
  }
  // Fixture paths modeling a minimal iceberg layout: metadata file, manifest list, manifest, data files.
  private static final String ROOT_PATH = "/root/iceberg/test/";
  private static final String METADATA_PATH = ROOT_PATH + "metadata/metadata.json";
  private static final String MANIFEST_LIST_PATH_0 = ROOT_PATH + "metadata/manifest_list.x";
  private static final String MANIFEST_PATH_0 = ROOT_PATH + "metadata/manifest.a";
  private static final String MANIFEST_DATA_PATH_0A = ROOT_PATH + "data/p0/a";
  private static final String MANIFEST_DATA_PATH_0B = ROOT_PATH + "data/p0/b";
  private static final String REGISTER_COMMIT_STEP = IcebergRegisterStep.class.getName();
  // Snapshot 0 carries the (shared) metadata path; snapshot 1 (below) does not.
  private static final MockIcebergTable.SnapshotPaths SNAPSHOT_PATHS_0 =
      new MockIcebergTable.SnapshotPaths(Optional.of(METADATA_PATH), MANIFEST_LIST_PATH_0, Arrays.asList(
          new IcebergSnapshotInfo.ManifestFileInfo(MANIFEST_PATH_0,
              Arrays.asList(MANIFEST_DATA_PATH_0A, MANIFEST_DATA_PATH_0B))));
  // Second snapshot's paths derived from the first by suffix/partition substitution.
  private static final String MANIFEST_LIST_PATH_1 = MANIFEST_LIST_PATH_0.replaceAll("\\.x$", ".y");
  private static final String MANIFEST_PATH_1 = MANIFEST_PATH_0.replaceAll("\\.a$", ".b");
  private static final String MANIFEST_DATA_PATH_1A = MANIFEST_DATA_PATH_0A.replaceAll("/p0/", "/p1/");
  private static final String MANIFEST_DATA_PATH_1B = MANIFEST_DATA_PATH_0B.replaceAll("/p0/", "/p1/");
  private static final MockIcebergTable.SnapshotPaths SNAPSHOT_PATHS_1 =
      new MockIcebergTable.SnapshotPaths(Optional.empty(), MANIFEST_LIST_PATH_1, Arrays.asList(
          new IcebergSnapshotInfo.ManifestFileInfo(MANIFEST_PATH_1,
              Arrays.asList(MANIFEST_DATA_PATH_1A, MANIFEST_DATA_PATH_1B))));
  private final String testDbName = "test_db_name";
  private final String testTblName = "test_tbl_name";
  public static final String SRC_CATALOG_URI = "abc://the.source.org/catalog";
  // Shared across tests; populated once in setUp().
  private final Properties copyConfigProperties = new Properties();
  /** One-time init: publisher dir property consumed when building {@link CopyConfiguration}s. */
  @BeforeClass
  public void setUp() throws Exception {
    copyConfigProperties.setProperty("data.publisher.final.dir", "/test");
  }
  /** Descriptor should expose platform, catalog URI, qualified table name, plus the source FS URI as metadata. */
  @Test
  public void testGetDatasetDescriptor() throws URISyntaxException {
    TableIdentifier tableId = TableIdentifier.of(testDbName, testTblName);
    String qualifiedTableName = "foo_prefix." + tableId.toString();
    String platformName = "Floe";
    IcebergTable table = new IcebergTable(tableId, qualifiedTableName, platformName,
        Mockito.mock(TableOperations.class),
        SRC_CATALOG_URI,
        Mockito.mock(Table.class));
    FileSystem mockFs = Mockito.mock(FileSystem.class);
    Mockito.when(mockFs.getUri()).thenReturn(SRC_FS_URI);
    DatasetDescriptor expected = new DatasetDescriptor(platformName, URI.create(SRC_CATALOG_URI), qualifiedTableName);
    expected.addMetadata(DatasetConstants.FS_URI, SRC_FS_URI.toString());
    Assert.assertEquals(table.getDatasetDescriptor(mockFs), expected);
  }
@Test
public void testGetFilePathsWhenDestEmpty() throws IOException {
List<MockIcebergTable.SnapshotPaths> icebergSnapshots = Lists.newArrayList(SNAPSHOT_PATHS_1, SNAPSHOT_PATHS_0);
List<String> existingDestPaths = Lists.newArrayList();
boolean shouldIncludeMetadataPath = true;
Set<Path> expectedResultPaths = withAllSnapshotPaths(Sets.newHashSet(), shouldIncludeMetadataPath, SNAPSHOT_PATHS_1, SNAPSHOT_PATHS_0);
validateGetFilePathsGivenDestState(icebergSnapshots, existingDestPaths, expectedResultPaths, shouldIncludeMetadataPath);
}
  /** A manifest list already at dest prunes that snapshot's entire subtree from the delta. */
  @Test
  public void testGetFilePathsWhenOneManifestListAtDest() throws IOException {
    List<MockIcebergTable.SnapshotPaths> icebergSnapshots = Lists.newArrayList(SNAPSHOT_PATHS_1, SNAPSHOT_PATHS_0);
    List<String> existingDestPaths = Lists.newArrayList(MANIFEST_LIST_PATH_1);
    boolean shouldIncludeMetadataPath = true;
    // only snapshot 0's paths remain, since snapshot 1's manifest list already exists at dest
    Set<Path> expectedResultPaths = withAllSnapshotPaths(Sets.newHashSet(), shouldIncludeMetadataPath, SNAPSHOT_PATHS_0);
    validateGetFilePathsGivenDestState(icebergSnapshots, existingDestPaths, expectedResultPaths, shouldIncludeMetadataPath);
  }
  /** A manifest at dest skips its subtree, but its parent manifest list is still expected in the delta. */
  @Test
  public void testGetFilePathsWhenOneManifestAtDest() throws IOException {
    List<MockIcebergTable.SnapshotPaths> icebergSnapshots = Lists.newArrayList(SNAPSHOT_PATHS_1, SNAPSHOT_PATHS_0);
    List<String> existingDestPaths = Lists.newArrayList(MANIFEST_PATH_1);
    boolean shouldIncludeMetadataPath = false;
    Set<Path> expectedResultPaths = withAllSnapshotPaths(Sets.newHashSet(), shouldIncludeMetadataPath, SNAPSHOT_PATHS_0);
    expectedResultPaths.add(new Path(MANIFEST_LIST_PATH_1)); // expect manifest's parent, despite manifest subtree skip
    validateGetFilePathsGivenDestState(icebergSnapshots, existingDestPaths, expectedResultPaths, shouldIncludeMetadataPath);
  }
  /** Individual data files at dest are NOT pruned: per-file existence checks are skipped for speed. */
  @Test
  public void testGetFilePathsWhenSomeDataFilesAtDest() throws IOException {
    List<MockIcebergTable.SnapshotPaths> icebergSnapshots = Lists.newArrayList(SNAPSHOT_PATHS_1, SNAPSHOT_PATHS_0);
    List<String> existingDestPaths = Lists.newArrayList(MANIFEST_DATA_PATH_1B, MANIFEST_DATA_PATH_0A);
    boolean shouldIncludeMetadataPath = true;
    Set<Path> expectedResultPaths = withAllSnapshotPaths(Sets.newHashSet(), shouldIncludeMetadataPath, SNAPSHOT_PATHS_1, SNAPSHOT_PATHS_0);
    // despite already existing on target, expect anyway: per-file check skipped for optimization's sake
    // expectedResultPaths.remove(new Path(MANIFEST_DATA_PATH_1B));
    // expectedResultPaths.remove(new Path(MANIFEST_DATA_PATH_0A));
    validateGetFilePathsGivenDestState(icebergSnapshots, existingDestPaths, expectedResultPaths, shouldIncludeMetadataPath);
  }
  /** A path absent on the source filesystem is silently dropped from the result rather than failing. */
  @Test
  public void testGetFilePathsWillSkipMissingSourceFile() throws IOException {
    List<MockIcebergTable.SnapshotPaths> icebergSnapshots = Lists.newArrayList(SNAPSHOT_PATHS_1, SNAPSHOT_PATHS_0);
    // pretend this path doesn't exist on source:
    Path missingPath = new Path(MANIFEST_DATA_PATH_0A);
    boolean shouldIncludeMetadataPath = false;
    Set<Path> existingSourcePaths = withAllSnapshotPaths(Sets.newHashSet(), shouldIncludeMetadataPath, SNAPSHOT_PATHS_1, SNAPSHOT_PATHS_0);
    existingSourcePaths.remove(missingPath);
    List<String> existingDestPaths = Lists.newArrayList(MANIFEST_LIST_PATH_1);
    // snapshot 1 pruned (its manifest list is at dest); missing path removed from expectation too
    Set<Path> expectedResultPaths = withAllSnapshotPaths(Sets.newHashSet(), shouldIncludeMetadataPath, SNAPSHOT_PATHS_0);
    expectedResultPaths.remove(missingPath);
    validateGetFilePathsGivenDestState(icebergSnapshots,
        Optional.of(existingSourcePaths.stream().map(Path::toString).collect(Collectors.toList())), existingDestPaths,
        expectedResultPaths, shouldIncludeMetadataPath);
  }
  /** When every manifest list is at dest but metadata is requested, only the metadata file remains in the delta. */
  @Test
  public void testGetFilePathsWhenManifestListsAtDestButNotMetadata() throws IOException {
    List<MockIcebergTable.SnapshotPaths> icebergSnapshots = Lists.newArrayList(SNAPSHOT_PATHS_1, SNAPSHOT_PATHS_0);
    List<String> existingDestPaths = Lists.newArrayList(MANIFEST_LIST_PATH_1, MANIFEST_LIST_PATH_0);
    boolean shouldIncludeMetadataPath = true;
    Set<Path> expectedResultPaths = Sets.newHashSet();
    expectedResultPaths.add(new Path(METADATA_PATH));
    validateGetFilePathsGivenDestState(icebergSnapshots, existingDestPaths, expectedResultPaths, shouldIncludeMetadataPath);
  }
  /** Same as above, but with metadata excluded from consideration the delta is empty. */
  @Test
  public void testGetFilePathsWhenManifestListsAtDestButNotMetadataYetThatIgnored() throws IOException {
    List<MockIcebergTable.SnapshotPaths> icebergSnapshots = Lists.newArrayList(SNAPSHOT_PATHS_1, SNAPSHOT_PATHS_0);
    List<String> existingDestPaths = Lists.newArrayList(MANIFEST_LIST_PATH_1, MANIFEST_LIST_PATH_0);
    boolean shouldIncludeMetadataPath = false;
    Set<Path> expectedResultPaths = Sets.newHashSet(); // nothing expected!
    validateGetFilePathsGivenDestState(icebergSnapshots, existingDestPaths, expectedResultPaths, shouldIncludeMetadataPath);
  }
  /** Everything already at dest: empty delta, and the short-circuit avoids any manifest scan on the mock table. */
  @Test
  public void testGetFilePathsWhenAllAtDest() throws IOException {
    List<MockIcebergTable.SnapshotPaths> icebergSnapshots = Lists.newArrayList(SNAPSHOT_PATHS_1, SNAPSHOT_PATHS_0);
    List<String> existingDestPaths = Lists.newArrayList(METADATA_PATH, MANIFEST_LIST_PATH_1, MANIFEST_LIST_PATH_0);
    boolean shouldIncludeMetadataPath = true;
    Set<Path> expectedResultPaths = Sets.newHashSet(); // not expecting any delta
    IcebergTable mockTable =
        validateGetFilePathsGivenDestState(icebergSnapshots, existingDestPaths, expectedResultPaths, shouldIncludeMetadataPath);
    // ensure short-circuiting was able to avert iceberg manifests scan
    Mockito.verify(mockTable, Mockito.times(1)).getCurrentSnapshotInfoOverviewOnly();
    Mockito.verify(mockTable, Mockito.times(1)).getTableId();
    Mockito.verifyNoMoreInteractions(mockTable);
  }
  /** Exception wrapping is used internally--ensure that doesn't lapse into silently swallowing errors */
  @Test(expectedExceptions = IOException.class)
  public void testGetFilePathsDoesNotSwallowDestFileSystemException() throws IOException {
    IcebergTable srcIcebergTable = MockIcebergTable.withSnapshots(TableIdentifier.of(testDbName, testTblName), Lists.newArrayList(SNAPSHOT_PATHS_0));
    MockFileSystemBuilder sourceFsBuilder = new MockFileSystemBuilder(SRC_FS_URI);
    FileSystem sourceFs = sourceFsBuilder.build();
    boolean shouldIncludeMetadataPathMakesNoDifference = true;
    IcebergDataset icebergDataset = new IcebergDataset(srcIcebergTable, null, new Properties(), sourceFs, shouldIncludeMetadataPathMakesNoDifference);
    MockFileSystemBuilder destFsBuilder = new MockFileSystemBuilder(DEST_FS_URI);
    FileSystem destFs = destFsBuilder.build();
    // dest FS throws on the manifest list stat; the IOException must propagate to the caller
    Mockito.doThrow(new IOException("Ha - not so fast!")).when(destFs).getFileStatus(new Path(SNAPSHOT_PATHS_0.manifestListPath));
    CopyConfiguration copyConfiguration = createEmptyCopyConfiguration(destFs);
    icebergDataset.getFilePathsToFileStatus(destFs, copyConfiguration, shouldIncludeMetadataPathMakesNoDifference);
  }
/** Validate error consolidation used to streamline logging. */
@Test
public void testPathErrorConsolidator() {
IcebergDataset.PathErrorConsolidator pec = IcebergDataset.createPathErrorConsolidator();
Optional<String> msg0 = pec.prepLogMsg(new Path("/a/b/c/file0"));
Assert.assertTrue(msg0.isPresent());
Assert.assertEquals(msg0.get(), "path not found: '/a/b/c/file0'");
Optional<String> msg1 = pec.prepLogMsg(new Path("/a/b/c/file1"));
Assert.assertTrue(msg1.isPresent());
Assert.assertEquals(msg1.get(), "paths not found: '/a/b/c/...'");
Optional<String> msg2 = pec.prepLogMsg(new Path("/a/b/c/file2"));
Assert.assertFalse(msg2.isPresent());
Optional<String> msg3 = pec.prepLogMsg(new Path("/a/b/c-other/file0"));
Assert.assertTrue(msg3.isPresent());
}
  /**
   * Test case to generate copy entities for all the file paths for a mocked iceberg table.
   * The assumption here is that we create copy entities for all the matching file paths,
   * without calculating any difference between the source and destination
   */
  @Test
  public void testGenerateCopyEntitiesWhenDestEmpty() throws IOException {
    List<String> expectedPaths = Arrays.asList(METADATA_PATH, MANIFEST_LIST_PATH_0,
        MANIFEST_PATH_0, MANIFEST_DATA_PATH_0A, MANIFEST_DATA_PATH_0B);
    MockFileSystemBuilder sourceBuilder = new MockFileSystemBuilder(SRC_FS_URI);
    sourceBuilder.addPaths(expectedPaths);
    FileSystem sourceFs = sourceBuilder.build();
    TableIdentifier tableIdInCommon = TableIdentifier.of(testDbName, testTblName);
    IcebergTable srcIcebergTbl = MockIcebergTable.withSnapshots(tableIdInCommon, Arrays.asList(SNAPSHOT_PATHS_0));
    IcebergTable destIcebergTbl = MockIcebergTable.withSnapshots(tableIdInCommon, Arrays.asList(SNAPSHOT_PATHS_1));
    boolean shouldIncludeManifestPath = true;
    // TrickIcebergDataset avoids the FileSystem.get static, so the mock source FS stays in effect
    IcebergDataset icebergDataset = new TrickIcebergDataset(srcIcebergTbl, destIcebergTbl, new Properties(), sourceFs, shouldIncludeManifestPath);
    MockFileSystemBuilder destBuilder = new MockFileSystemBuilder(DEST_FS_URI);
    FileSystem destFs = destBuilder.build();
    CopyConfiguration copyConfiguration =
        CopyConfiguration.builder(destFs, copyConfigProperties).preserve(PreserveAttributes.fromMnemonicString(""))
            .copyContext(new CopyContext()).build();
    Collection<CopyEntity> copyEntities = icebergDataset.generateCopyEntities(destFs, copyConfiguration);
    verifyCopyEntities(copyEntities, expectedPaths);
  }
  /** Test generating copy entities for a multi-snapshot iceberg; given empty dest, src-dest delta will be entirety */
  @Test
  public void testGenerateCopyEntitiesMultiSnapshotWhenDestEmpty() throws IOException {
    List<String> expectedPaths = Arrays.asList( // METADATA_PATH,
        MANIFEST_LIST_PATH_0, MANIFEST_PATH_0, MANIFEST_DATA_PATH_0A, MANIFEST_DATA_PATH_0B,
        MANIFEST_LIST_PATH_1, MANIFEST_PATH_1, MANIFEST_DATA_PATH_1A, MANIFEST_DATA_PATH_1B);
    MockFileSystemBuilder sourceBuilder = new MockFileSystemBuilder(SRC_FS_URI);
    sourceBuilder.addPaths(expectedPaths);
    FileSystem sourceFs = sourceBuilder.build();
    TableIdentifier tableIdInCommon = TableIdentifier.of(testDbName, testTblName);
    IcebergTable srcIcebergTable = MockIcebergTable.withSnapshots(tableIdInCommon, Arrays.asList(SNAPSHOT_PATHS_1, SNAPSHOT_PATHS_0));
    IcebergTable destIcebergTable = MockIcebergTable.withSnapshots(tableIdInCommon, Arrays.asList(SNAPSHOT_PATHS_1));
    boolean shouldIncludeManifestPath = false; // hence METADATA_PATH is excluded from expectations above
    IcebergDataset icebergDataset = new TrickIcebergDataset(srcIcebergTable, destIcebergTable, new Properties(), sourceFs, shouldIncludeManifestPath);
    MockFileSystemBuilder destBuilder = new MockFileSystemBuilder(DEST_FS_URI);
    FileSystem destFs = destBuilder.build();
    CopyConfiguration copyConfiguration =
        CopyConfiguration.builder(destFs, copyConfigProperties).preserve(PreserveAttributes.fromMnemonicString(""))
            .copyContext(new CopyContext()).build();
    Collection<CopyEntity> copyEntities = icebergDataset.generateCopyEntities(destFs, copyConfiguration);
    verifyCopyEntities(copyEntities, expectedPaths);
  }
  /** With "ugp" preservation requested, each copy entity must carry the source owner/group/permissions. */
  @Test
  public void testFsOwnershipAndPermissionPreservationWhenDestEmpty() throws IOException {
    // distinct owner/group/permission per file kind, to catch any cross-wiring
    FileStatus metadataFileStatus = new FileStatus(0, false, 0, 0, 0, 0, new FsPermission(FsAction.WRITE, FsAction.READ, FsAction.NONE), "metadata_owner", "metadata_group", null);
    FileStatus manifestFileStatus = new FileStatus(0, false, 0, 0, 0, 0, new FsPermission(FsAction.WRITE, FsAction.READ, FsAction.NONE), "manifest_list_owner", "manifest_list_group", null);
    FileStatus manifestDataFileStatus = new FileStatus(0, false, 0, 0, 0, 0, new FsPermission(FsAction.WRITE_EXECUTE, FsAction.READ_EXECUTE, FsAction.NONE), "manifest_data_owner", "manifest_data_group", null);
    Map<String, FileStatus> expectedPathsAndFileStatuses = Maps.newHashMap();
    expectedPathsAndFileStatuses.put(METADATA_PATH, metadataFileStatus);
    expectedPathsAndFileStatuses.put(MANIFEST_PATH_0, manifestFileStatus);
    expectedPathsAndFileStatuses.put(MANIFEST_LIST_PATH_0, manifestFileStatus);
    expectedPathsAndFileStatuses.put(MANIFEST_DATA_PATH_0A, manifestDataFileStatus);
    expectedPathsAndFileStatuses.put(MANIFEST_DATA_PATH_0B, manifestDataFileStatus);
    MockFileSystemBuilder sourceBuilder = new MockFileSystemBuilder(SRC_FS_URI);
    sourceBuilder.addPathsAndFileStatuses(expectedPathsAndFileStatuses);
    FileSystem sourceFs = sourceBuilder.build();
    TableIdentifier tableIdInCommon = TableIdentifier.of(testDbName, testTblName);
    IcebergTable srcIcebergTable = MockIcebergTable.withSnapshots(tableIdInCommon, Arrays.asList(SNAPSHOT_PATHS_0));
    IcebergTable destIcebergTable = MockIcebergTable.withSnapshots(tableIdInCommon, Arrays.asList(SNAPSHOT_PATHS_1));
    boolean shouldIncludeManifestPath = true;
    IcebergDataset icebergDataset = new TrickIcebergDataset(srcIcebergTable, destIcebergTable, new Properties(), sourceFs, shouldIncludeManifestPath);
    MockFileSystemBuilder destBuilder = new MockFileSystemBuilder(DEST_FS_URI);
    FileSystem destFs = destBuilder.build();
    CopyConfiguration copyConfiguration =
        CopyConfiguration.builder(destFs, copyConfigProperties)
            // preserving attributes for owner, group and permissions respectively
            .preserve(PreserveAttributes.fromMnemonicString("ugp"))
            .copyContext(new CopyContext()).build();
    Collection<CopyEntity> copyEntities = icebergDataset.generateCopyEntities(destFs, copyConfiguration);
    verifyFsOwnershipAndPermissionPreservation(copyEntities, sourceBuilder.getPathsAndFileStatuses());
  }
  /** Without preservation requested, copy entities should carry only default (empty) FileStatus attributes. */
  @Test
  public void testFsOwnershipAndPermissionWithoutPreservationWhenDestEmpty() throws IOException {
    List<String> expectedPaths = Arrays.asList(METADATA_PATH, MANIFEST_LIST_PATH_0,
        MANIFEST_PATH_0, MANIFEST_DATA_PATH_0A, MANIFEST_DATA_PATH_0B);
    // default-constructed FileStatus per path, since nothing is to be preserved
    Map<Path, FileStatus> expectedPathsAndFileStatuses = Maps.newHashMap();
    for (String expectedPath : expectedPaths) {
      expectedPathsAndFileStatuses.putIfAbsent(new Path(expectedPath), new FileStatus());
    }
    MockFileSystemBuilder sourceBuilder = new MockFileSystemBuilder(SRC_FS_URI);
    sourceBuilder.addPaths(expectedPaths);
    FileSystem sourceFs = sourceBuilder.build();
    TableIdentifier tableIdInCommon = TableIdentifier.of(testDbName, testTblName);
    IcebergTable srcIcebergTable = MockIcebergTable.withSnapshots(tableIdInCommon, Arrays.asList(SNAPSHOT_PATHS_0));
    IcebergTable destIcebergTable = MockIcebergTable.withSnapshots(tableIdInCommon, Arrays.asList(SNAPSHOT_PATHS_1));
    boolean shouldIncludeManifestPath = true;
    IcebergDataset icebergDataset = new TrickIcebergDataset(srcIcebergTable, destIcebergTable, new Properties(), sourceFs, shouldIncludeManifestPath);
    MockFileSystemBuilder destBuilder = new MockFileSystemBuilder(DEST_FS_URI);
    FileSystem destFs = destBuilder.build();
    CopyConfiguration copyConfiguration =
        CopyConfiguration.builder(destFs, copyConfigProperties)
            // without preserving attributes for owner, group and permissions
            .preserve(PreserveAttributes.fromMnemonicString(""))
            .copyContext(new CopyContext()).build();
    Collection<CopyEntity> copyEntities = icebergDataset.generateCopyEntities(destFs, copyConfiguration);
    verifyFsOwnershipAndPermissionPreservation(copyEntities, expectedPathsAndFileStatuses);
  }
/**
* exercise {@link IcebergDataset::getFilePaths} and validate the result
* @return {@link IcebergTable} (mock!), for behavioral verification
*/
protected IcebergTable validateGetFilePathsGivenDestState(List<MockIcebergTable.SnapshotPaths> sourceSnapshotPathSets,
List<String> existingDestPaths, Set<Path> expectedResultPaths, boolean shouldIncludeMetadataPath) throws IOException {
return validateGetFilePathsGivenDestState(sourceSnapshotPathSets, Optional.empty(), existingDestPaths,
expectedResultPaths, shouldIncludeMetadataPath);
}
  /**
   * exercise {@link IcebergDataset::getFilePaths} and validate the result
   * @param optExistingSourcePaths when absent, the mock source FS reports every path as existing
   * @return {@link IcebergTable} (mock!), for behavioral verification
   */
  protected IcebergTable validateGetFilePathsGivenDestState(List<MockIcebergTable.SnapshotPaths> sourceSnapshotPathSets,
      Optional<List<String>> optExistingSourcePaths, List<String> existingDestPaths, Set<Path> expectedResultPaths,
      boolean shouldIncludeMetadataPath) throws IOException {
    IcebergTable srcIcebergTable = MockIcebergTable.withSnapshots(TableIdentifier.of(testDbName, testTblName), sourceSnapshotPathSets);
    // `!isPresent()` => source FS represents every path; else only those explicitly listed
    MockFileSystemBuilder sourceFsBuilder = new MockFileSystemBuilder(SRC_FS_URI, !optExistingSourcePaths.isPresent());
    optExistingSourcePaths.ifPresent(sourceFsBuilder::addPaths);
    FileSystem sourceFs = sourceFsBuilder.build();
    IcebergDataset icebergDataset = new IcebergDataset(srcIcebergTable, null, new Properties(), sourceFs, shouldIncludeMetadataPath);
    MockFileSystemBuilder destFsBuilder = new MockFileSystemBuilder(DEST_FS_URI);
    destFsBuilder.addPaths(existingDestPaths);
    FileSystem destFs = destFsBuilder.build();
    CopyConfiguration copyConfiguration = createEmptyCopyConfiguration(destFs);
    IcebergDataset.GetFilePathsToFileStatusResult pathsResult = icebergDataset.getFilePathsToFileStatus(destFs, copyConfiguration, shouldIncludeMetadataPath);
    Map<Path, FileStatus> filePathsToFileStatus = pathsResult.getPathsToFileStatus();
    Assert.assertEquals(filePathsToFileStatus.keySet(), expectedResultPaths);
    // verify solely the path portion of the `FileStatus`, since that's all mock sets up
    Assert.assertEquals(
        filePathsToFileStatus.values().stream().map(FileStatus::getPath).collect(Collectors.toSet()),
        expectedResultPaths);
    return srcIcebergTable;
  }
/** @return `paths` after adding to it all paths of every one of `snapshotDefs` */
protected static Set<Path> withAllSnapshotPaths(Set<Path> paths, boolean shouldIncludeMetadataPath, MockIcebergTable.SnapshotPaths... snapshotDefs) {
Arrays.stream(snapshotDefs).flatMap(snapshotDef ->
snapshotDef.asSnapshotInfo().getAllPaths(shouldIncludeMetadataPath).stream())
.forEach(p ->
paths.add(new Path(p))
);
return paths;
}
  /** @return a {@link CopyConfiguration} built from only the shared test properties and a fresh {@link CopyContext} */
  private CopyConfiguration createEmptyCopyConfiguration(FileSystem fs) {
    return CopyConfiguration.builder(fs, copyConfigProperties).copyContext(new CopyContext()).build();
  }
private static void verifyCopyEntities(Collection<CopyEntity> copyEntities, List<String> expected) {
List<String> actual = new ArrayList<>();
for (CopyEntity copyEntity : copyEntities) {
String json = copyEntity.toString();
if (isCopyableFile(json)) {
String filepath = CopyEntityDeserializer.getOriginFilePathAsStringFromJson(json);
actual.add(filepath);
} else{
verifyPostPublishStep(json, REGISTER_COMMIT_STEP);
}
}
Assert.assertEquals(actual.size(), expected.size(), "Set" + actual.toString() + " vs Set" + expected.toString());
Assert.assertEqualsNoOrder(actual.toArray(), expected.toArray());
}
public static boolean isCopyableFile(String json) {
String objectType = new Gson().fromJson(json, JsonObject.class)
.getAsJsonPrimitive("object-type")
.getAsString();
return objectType.equals("org.apache.gobblin.data.management.copy.CopyableFile") || objectType.equals("org.apache.gobblin.data.management.copy.iceberg.IcebergPartitionCopyableFile");
}
  /** For each copyable-file entity, verify its destination and ancestor owner/group/permissions against expectations. */
  private static void verifyFsOwnershipAndPermissionPreservation(Collection<CopyEntity> copyEntities, Map<Path, FileStatus> expectedPathsAndFileStatuses) {
    for (CopyEntity copyEntity : copyEntities) {
      String copyEntityJson = copyEntity.toString();
      if (isCopyableFile(copyEntityJson)) {
        List<CopyEntityDeserializer.FileOwnerAndPermissions> ancestorFileOwnerAndPermissionsList =
            CopyEntityDeserializer.getAncestorOwnerAndPermissions(copyEntityJson);
        CopyEntityDeserializer.FileOwnerAndPermissions destinationFileOwnerAndPermissions = CopyEntityDeserializer.getDestinationOwnerAndPermissions(copyEntityJson);
        Path filePath = new Path(CopyEntityDeserializer.getOriginFilePathAsStringFromJson(copyEntityJson));
        FileStatus fileStatus = expectedPathsAndFileStatuses.get(filePath);
        verifyFileStatus(destinationFileOwnerAndPermissions, fileStatus);
        // providing path's parent to verify ancestor owner and permissions
        verifyAncestorPermissions(ancestorFileOwnerAndPermissionsList, filePath.getParent(),
            expectedPathsAndFileStatuses);
      } else {
        verifyPostPublishStep(copyEntityJson, REGISTER_COMMIT_STEP);
      }
    }
  }
private static void verifyFileStatus(CopyEntityDeserializer.FileOwnerAndPermissions actual, FileStatus expected) {
Assert.assertEquals(actual.owner, expected.getOwner());
Assert.assertEquals(actual.group, expected.getGroup());
Assert.assertEquals(actual.userActionPermission, expected.getPermission().getUserAction().toString());
Assert.assertEquals(actual.groupActionPermission, expected.getPermission().getGroupAction().toString());
Assert.assertEquals(actual.otherActionPermission, expected.getPermission().getOtherAction().toString());
}
private static void verifyAncestorPermissions(List<CopyEntityDeserializer.FileOwnerAndPermissions> actualList, Path path, Map<Path, FileStatus> pathFileStatusMap) {
for (CopyEntityDeserializer.FileOwnerAndPermissions actual : actualList) {
FileStatus expected = pathFileStatusMap.getOrDefault(path, new FileStatus());
verifyFileStatus(actual, expected);
path = path.getParent();
}
}
public static void verifyPostPublishStep(String json, String expectedCommitStep) {
String actualCommitStep = new Gson().fromJson(json, JsonObject.class)
.getAsJsonObject("object-data").getAsJsonObject("step").getAsJsonPrimitive("object-type").getAsString();
Assert.assertEquals(actualCommitStep, expectedCommitStep);
}
  /**
   * Without this subclass, the {@link FileSystem} mock would be replaced by the result of the
   * {@link FileSystem#get} static call, preventing tests from effectively setting up certain source paths as existing.
   * Overriding {@link IcebergDataset#getSourceFileSystemFromFileStatus(FileStatus, Configuration)} avoids that
   * static call entirely.
   */
protected static class TrickIcebergDataset extends IcebergDataset {
public TrickIcebergDataset(IcebergTable srcIcebergTable, IcebergTable destIcebergTable, Properties properties,
FileSystem sourceFs, boolean shouldIncludeManifestPath) {
super(srcIcebergTable, destIcebergTable, properties, sourceFs, shouldIncludeManifestPath);
}
@Override // as the `static` itself is not directly mock-able
protected FileSystem getSourceFileSystemFromFileStatus(FileStatus fileStatus, Configuration hadoopConfig) throws IOException {
return this.sourceFs;
}
}
;
  /**
   * Builds a {@link FileSystem} mock whose `getUri`, `makeQualified`, and `getFileStatus`
   * answer for the paths configured through the `add*` methods.
   */
  protected static class MockFileSystemBuilder {
    private final URI fsURI;
    /** when not `.isPresent()`, all paths exist; when `.get().isEmpty()`, none exist; else only those indicated do */
    private final Optional<Map<Path, FileStatus>> optPathsWithFileStatuses;
    public MockFileSystemBuilder(URI fsURI) {
      this(fsURI, false);
    }
    public MockFileSystemBuilder(URI fsURI, boolean shouldRepresentEveryPath) {
      this.fsURI = fsURI;
      // `Optional.empty()` encodes "every path exists"; an (initially empty) map encodes "only added paths exist"
      this.optPathsWithFileStatuses = shouldRepresentEveryPath ? Optional.empty() : Optional.of(Maps.newHashMap());
    }
    /** Registers paths as existing, each with no particular {@link FileStatus} (a blank one is synthesized at build time). */
    public void addPaths(List<String> pathStrings) {
      Map<String, FileStatus> map = Maps.newHashMap();
      for (String pathString : pathStrings) {
        map.putIfAbsent(pathString, null);
      }
      addPathsAndFileStatuses(map);
    }
    /** Registers each entry's path with its (possibly null) {@link FileStatus}. */
    public void addPathsAndFileStatuses(Map<String, FileStatus> pathAndFileStatuses) {
      for (Map.Entry<String, FileStatus> entry : pathAndFileStatuses.entrySet()) {
        String pathString = entry.getKey();
        FileStatus fileStatus = entry.getValue();
        addPathsAndFileStatuses(pathString, fileStatus);
      }
    }
    /** Registers a single path, aligning a non-null status' own path field with the registered path. */
    public void addPathsAndFileStatuses(String pathString, FileStatus fileStatus) {
      Path path = new Path(pathString);
      if(fileStatus != null) { fileStatus.setPath(path);}
      addPathAndFileStatus(path, fileStatus);
    }
    /**
     * Registers `path` and, recursively, every ancestor up to the root, as existing.
     * NOTE(review): ancestors not previously registered inherit this same `fileStatus` instance (whose
     * path field was set to the leaf) — presumably intentional for permission-preservation tests; confirm.
     */
    public void addPathAndFileStatus(Path path, FileStatus fileStatus) {
      if (!this.optPathsWithFileStatuses.isPresent()) {
        throw new IllegalStateException("unable to add paths and file statuses when constructed");
      }
      optPathsWithFileStatuses.get().putIfAbsent(path, fileStatus);
      if (!path.isRoot()) { // recursively add ancestors of a previously unknown path
        addPathAndFileStatus(path.getParent(), fileStatus);
      }
    }
    public Map<Path, FileStatus> getPathsAndFileStatuses() {
      return optPathsWithFileStatuses.get();
    }
    /** @return the configured mock; unregistered paths throw {@link FileNotFoundException} (unless every path is represented) */
    public FileSystem build()
        throws IOException {
      FileSystem fs = Mockito.mock(FileSystem.class);
      Mockito.when(fs.getUri()).thenReturn(fsURI);
      Mockito.when(fs.makeQualified(any(Path.class)))
          .thenAnswer(invocation -> invocation.getArgument(0, Path.class).makeQualified(fsURI, new Path("/")));
      if (!this.optPathsWithFileStatuses.isPresent()) {
        // every path "exists": synthesize a blank status on demand
        Mockito.when(fs.getFileStatus(any(Path.class)))
            .thenAnswer(invocation -> createEmptyFileStatus(invocation.getArgument(0, Path.class).toString()));
      } else {
        // WARNING: order is critical--specific paths *after* `any(Path)`; in addition, since mocking further
        // an already-mocked instance, `.doReturn/.when` is needed (vs. `.when/.thenReturn`)
        Mockito.when(fs.getFileStatus(any(Path.class))).thenThrow(new FileNotFoundException());
        for (Map.Entry<Path, FileStatus> entry : this.optPathsWithFileStatuses.get().entrySet()) {
          Path p = entry.getKey();
          FileStatus fileStatus = entry.getValue();
          Mockito.doReturn(fileStatus != null ? fileStatus : createEmptyFileStatus(p.toString())).when(fs).getFileStatus(p);
        }
      }
      return fs;
    }
    /** @return a blank {@link FileStatus} whose path field is set from `pathString` */
    public static FileStatus createEmptyFileStatus(String pathString) throws IOException {
      Path path = new Path(pathString);
      FileStatus fileStatus = new FileStatus();
      fileStatus.setPath(path);
      return fileStatus;
    }
  }
  /** Factory for {@link IcebergTable} mocks whose snapshot info derives from supplied path sets. */
  private static class MockIcebergTable {
    /** Paths comprising one snapshot: optional metadata path, manifest-list path, and manifest files. */
    @Data
    public static class SnapshotPaths {
      private final Optional<String> metadataPath;
      private final String manifestListPath;
      private final List<IcebergSnapshotInfo.ManifestFileInfo> manifestFiles;
      // stand-in metadata object, shared across instances; only attached when `metadataPath.isPresent()`
      private static final TableMetadata unusedStubMetadata = Mockito.mock(TableMetadata.class);
      public IcebergSnapshotInfo asSnapshotInfo() {
        return asSnapshotInfo(0L);
      }
      /** @param snapshotIdIndex used both as snapshot ID and as snapshot (epoch) timestamp */
      public IcebergSnapshotInfo asSnapshotInfo(long snapshotIdIndex) {
        return asSnapshotInfo(snapshotIdIndex, Instant.ofEpochMilli(snapshotIdIndex));
      }
      public IcebergSnapshotInfo asSnapshotInfo(Long snapshotId, Instant timestamp) {
        return new IcebergSnapshotInfo(snapshotId, timestamp, this.metadataPath,
            this.metadataPath.map(ignore -> unusedStubMetadata), // only set when `metadataPath.isPresent()`
            this.manifestListPath, this.manifestFiles);
      }
    }
    /**
     * @return an {@link IcebergTable} mock whose "current" snapshot is the last of `snapshotPathSets`
     *         and whose incremental iterator replays every path set in order (index doubles as ID/timestamp)
     */
    public static IcebergTable withSnapshots(TableIdentifier tableId, List<SnapshotPaths> snapshotPathSets) throws IOException {
      IcebergTable table = Mockito.mock(IcebergTable.class);
      Mockito.when(table.getTableId()).thenReturn(tableId);
      int lastIndex = snapshotPathSets.size() - 1;
      Mockito.when(table.getCurrentSnapshotInfoOverviewOnly())
          .thenReturn(snapshotPathSets.get(lastIndex).asSnapshotInfo(lastIndex));
      // ADMISSION: this is strictly more analogous to `IcebergTable.getAllSnapshotInfosIterator()`, as it doesn't
      // filter only the delta... nonetheless, it should work fine for the tests herein
      Mockito.when(table.getIncrementalSnapshotInfosIterator()).thenReturn(
          IndexingStreams.transformWithIndex(snapshotPathSets.stream(),
              (pathSet, i) -> pathSet.asSnapshotInfo(i)).iterator());
      return table;
    }
  }
public static class IndexingStreams {
/** @return {@link Stream} equivalent of `inputs.zipWithIndex.map(f)` in scala */
public static <T, R> Stream<R> transformWithIndex(Stream<T> inputs, BiFunction<T, Integer, R> f) {
// given sketchy import, sequester for now within enclosing test class, rather than adding to `gobblin-utility`
return Streams.zip(
inputs, IntStream.iterate(0, i -> i + 1).boxed(), f);
}
}
protected static class CopyEntityDeserializer {
@Data
public static class FileOwnerAndPermissions {
String owner;
String group;
// assigning default values
String userActionPermission = FsAction.valueOf("READ_WRITE").toString();
String groupActionPermission = FsAction.valueOf("READ_WRITE").toString();
String otherActionPermission = FsAction.valueOf("READ_WRITE").toString();
}
public static String getOriginFilePathAsStringFromJson(String json) {
return new Gson().fromJson(json, JsonObject.class)
.getAsJsonObject("object-data")
.getAsJsonObject("origin")
.getAsJsonObject("object-data").getAsJsonObject("path").getAsJsonObject("object-data")
.getAsJsonObject("uri").getAsJsonPrimitive("object-data").getAsString();
}
public static String getDestinationFilePathAsStringFromJson(String json) {
return new Gson().fromJson(json, JsonObject.class)
.getAsJsonObject("object-data")
.getAsJsonObject("destination")
.getAsJsonObject("object-data")
.getAsJsonObject("uri").getAsJsonPrimitive("object-data").getAsString();
}
public static List<FileOwnerAndPermissions> getAncestorOwnerAndPermissions(String json) {
JsonArray ancestorsOwnerAndPermissions = new Gson().fromJson(json, JsonObject.class)
.getAsJsonObject("object-data")
.getAsJsonArray("ancestorsOwnerAndPermission");
List<FileOwnerAndPermissions> fileOwnerAndPermissionsList = Lists.newArrayList();
for (JsonElement jsonElement : ancestorsOwnerAndPermissions) {
fileOwnerAndPermissionsList.add(getFileOwnerAndPermissions(jsonElement.getAsJsonObject()));
}
return fileOwnerAndPermissionsList;
}
public static FileOwnerAndPermissions getDestinationOwnerAndPermissions(String json) {
JsonObject destinationOwnerAndPermissionsJsonObject = new Gson().fromJson(json, JsonObject.class)
.getAsJsonObject("object-data")
.getAsJsonObject("destinationOwnerAndPermission");
FileOwnerAndPermissions fileOwnerAndPermissions = getFileOwnerAndPermissions(destinationOwnerAndPermissionsJsonObject);
return fileOwnerAndPermissions;
}
private static FileOwnerAndPermissions getFileOwnerAndPermissions(JsonObject jsonObject) {
FileOwnerAndPermissions fileOwnerAndPermissions = new FileOwnerAndPermissions();
JsonObject objData = jsonObject.getAsJsonObject("object-data");
fileOwnerAndPermissions.owner = objData.has("owner") ? objData.getAsJsonPrimitive("owner").getAsString() : "";
fileOwnerAndPermissions.group = objData.has("group") ? objData.getAsJsonPrimitive("group").getAsString() : "";
JsonObject fsPermission = objData.has("fsPermission") ? objData.getAsJsonObject("fsPermission") : null;
if (fsPermission != null) {
JsonObject objectData = fsPermission.getAsJsonObject("object-data");
fileOwnerAndPermissions.userActionPermission =
objectData.getAsJsonObject("useraction").getAsJsonPrimitive("object-data").getAsString();
fileOwnerAndPermissions.groupActionPermission =
objectData.getAsJsonObject("groupaction").getAsJsonPrimitive("object-data").getAsString();
fileOwnerAndPermissions.otherActionPermission =
objectData.getAsJsonObject("otheraction").getAsJsonPrimitive("object-data").getAsString();
}
return fileOwnerAndPermissions;
}
}
}
|
googleapis/google-cloud-java | 38,187 | java-networkservices/proto-google-cloud-networkservices-v1/src/main/java/com/google/cloud/networkservices/v1/UpdateServiceBindingRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkservices/v1/service_binding.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkservices.v1;
/**
*
*
* <pre>
* Request used by the UpdateServiceBinding method.
* </pre>
*
* Protobuf type {@code google.cloud.networkservices.v1.UpdateServiceBindingRequest}
*/
public final class UpdateServiceBindingRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.networkservices.v1.UpdateServiceBindingRequest)
UpdateServiceBindingRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateServiceBindingRequest.newBuilder() to construct.
private UpdateServiceBindingRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateServiceBindingRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateServiceBindingRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.networkservices.v1.ServiceBindingProto
.internal_static_google_cloud_networkservices_v1_UpdateServiceBindingRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.networkservices.v1.ServiceBindingProto
.internal_static_google_cloud_networkservices_v1_UpdateServiceBindingRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.networkservices.v1.UpdateServiceBindingRequest.class,
com.google.cloud.networkservices.v1.UpdateServiceBindingRequest.Builder.class);
}
private int bitField0_;
public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* ServiceBinding resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* ServiceBinding resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* ServiceBinding resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int SERVICE_BINDING_FIELD_NUMBER = 2;
private com.google.cloud.networkservices.v1.ServiceBinding serviceBinding_;
/**
*
*
* <pre>
* Required. Updated ServiceBinding resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.ServiceBinding service_binding = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the serviceBinding field is set.
*/
@java.lang.Override
public boolean hasServiceBinding() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Updated ServiceBinding resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.ServiceBinding service_binding = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The serviceBinding.
*/
@java.lang.Override
public com.google.cloud.networkservices.v1.ServiceBinding getServiceBinding() {
return serviceBinding_ == null
? com.google.cloud.networkservices.v1.ServiceBinding.getDefaultInstance()
: serviceBinding_;
}
/**
*
*
* <pre>
* Required. Updated ServiceBinding resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.ServiceBinding service_binding = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.networkservices.v1.ServiceBindingOrBuilder getServiceBindingOrBuilder() {
return serviceBinding_ == null
? com.google.cloud.networkservices.v1.ServiceBinding.getDefaultInstance()
: serviceBinding_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getServiceBinding());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getServiceBinding());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.networkservices.v1.UpdateServiceBindingRequest)) {
return super.equals(obj);
}
com.google.cloud.networkservices.v1.UpdateServiceBindingRequest other =
(com.google.cloud.networkservices.v1.UpdateServiceBindingRequest) obj;
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (hasServiceBinding() != other.hasServiceBinding()) return false;
if (hasServiceBinding()) {
if (!getServiceBinding().equals(other.getServiceBinding())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
if (hasServiceBinding()) {
hash = (37 * hash) + SERVICE_BINDING_FIELD_NUMBER;
hash = (53 * hash) + getServiceBinding().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.networkservices.v1.UpdateServiceBindingRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkservices.v1.UpdateServiceBindingRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.UpdateServiceBindingRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkservices.v1.UpdateServiceBindingRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.UpdateServiceBindingRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.networkservices.v1.UpdateServiceBindingRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.UpdateServiceBindingRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkservices.v1.UpdateServiceBindingRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.UpdateServiceBindingRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.networkservices.v1.UpdateServiceBindingRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.UpdateServiceBindingRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkservices.v1.UpdateServiceBindingRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.networkservices.v1.UpdateServiceBindingRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request used by the UpdateServiceBinding method.
* </pre>
*
* Protobuf type {@code google.cloud.networkservices.v1.UpdateServiceBindingRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.networkservices.v1.UpdateServiceBindingRequest)
com.google.cloud.networkservices.v1.UpdateServiceBindingRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.networkservices.v1.ServiceBindingProto
.internal_static_google_cloud_networkservices_v1_UpdateServiceBindingRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.networkservices.v1.ServiceBindingProto
.internal_static_google_cloud_networkservices_v1_UpdateServiceBindingRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.networkservices.v1.UpdateServiceBindingRequest.class,
com.google.cloud.networkservices.v1.UpdateServiceBindingRequest.Builder.class);
}
// Construct using com.google.cloud.networkservices.v1.UpdateServiceBindingRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateMaskFieldBuilder();
getServiceBindingFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
serviceBinding_ = null;
if (serviceBindingBuilder_ != null) {
serviceBindingBuilder_.dispose();
serviceBindingBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.networkservices.v1.ServiceBindingProto
.internal_static_google_cloud_networkservices_v1_UpdateServiceBindingRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.networkservices.v1.UpdateServiceBindingRequest
getDefaultInstanceForType() {
return com.google.cloud.networkservices.v1.UpdateServiceBindingRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.networkservices.v1.UpdateServiceBindingRequest build() {
com.google.cloud.networkservices.v1.UpdateServiceBindingRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.networkservices.v1.UpdateServiceBindingRequest buildPartial() {
com.google.cloud.networkservices.v1.UpdateServiceBindingRequest result =
new com.google.cloud.networkservices.v1.UpdateServiceBindingRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.networkservices.v1.UpdateServiceBindingRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.serviceBinding_ =
serviceBindingBuilder_ == null ? serviceBinding_ : serviceBindingBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.networkservices.v1.UpdateServiceBindingRequest) {
return mergeFrom((com.google.cloud.networkservices.v1.UpdateServiceBindingRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.networkservices.v1.UpdateServiceBindingRequest other) {
if (other
== com.google.cloud.networkservices.v1.UpdateServiceBindingRequest.getDefaultInstance())
return this;
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.hasServiceBinding()) {
mergeServiceBinding(other.getServiceBinding());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getServiceBindingFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* ServiceBinding resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* ServiceBinding resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* ServiceBinding resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* ServiceBinding resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* ServiceBinding resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* ServiceBinding resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder clearUpdateMask() {
      // Clear the has-bit, drop the stored message, and dispose of any nested builder
      // so the field reverts to its unset state.
      bitField0_ = (bitField0_ & ~0x00000001);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* ServiceBinding resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Handing out the mutable builder marks the field as set, since the caller
      // is expected to mutate it.
      bitField0_ |= 0x00000001;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* ServiceBinding resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        // Never return null: fall back to the default instance when unset.
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* ServiceBinding resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily create the nested builder; once created it owns the field's state,
      // so the plain reference is nulled out to avoid divergent copies.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Backing field for service_binding; null until set.
    private com.google.cloud.networkservices.v1.ServiceBinding serviceBinding_;
    // Lazily-created nested builder; when non-null it owns the field's state.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.networkservices.v1.ServiceBinding,
            com.google.cloud.networkservices.v1.ServiceBinding.Builder,
            com.google.cloud.networkservices.v1.ServiceBindingOrBuilder>
        serviceBindingBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Updated ServiceBinding resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.ServiceBinding service_binding = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the serviceBinding field is set.
     */
    public boolean hasServiceBinding() {
      // Bit 1 of bitField0_ tracks explicit presence of service_binding.
      return ((bitField0_ & 0x00000002) != 0);
    }
/**
*
*
* <pre>
* Required. Updated ServiceBinding resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.ServiceBinding service_binding = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The serviceBinding.
*/
    public com.google.cloud.networkservices.v1.ServiceBinding getServiceBinding() {
      if (serviceBindingBuilder_ == null) {
        // Never return null: fall back to the default instance when unset.
        return serviceBinding_ == null
            ? com.google.cloud.networkservices.v1.ServiceBinding.getDefaultInstance()
            : serviceBinding_;
      } else {
        return serviceBindingBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Required. Updated ServiceBinding resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.ServiceBinding service_binding = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setServiceBinding(com.google.cloud.networkservices.v1.ServiceBinding value) {
      if (serviceBindingBuilder_ == null) {
        // Singular message fields are null-hostile; fail fast on a null value.
        if (value == null) {
          throw new NullPointerException();
        }
        serviceBinding_ = value;
      } else {
        serviceBindingBuilder_.setMessage(value);
      }
      // Bit 1 of bitField0_ marks service_binding as explicitly set.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Updated ServiceBinding resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.ServiceBinding service_binding = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setServiceBinding(
        com.google.cloud.networkservices.v1.ServiceBinding.Builder builderForValue) {
      // Build the message eagerly so later mutations of builderForValue are not reflected.
      if (serviceBindingBuilder_ == null) {
        serviceBinding_ = builderForValue.build();
      } else {
        serviceBindingBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Updated ServiceBinding resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.ServiceBinding service_binding = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeServiceBinding(com.google.cloud.networkservices.v1.ServiceBinding value) {
      if (serviceBindingBuilder_ == null) {
        // Merge into the existing message only if the field was already set to a
        // non-default value; otherwise simply adopt the incoming message.
        if (((bitField0_ & 0x00000002) != 0)
            && serviceBinding_ != null
            && serviceBinding_
                != com.google.cloud.networkservices.v1.ServiceBinding.getDefaultInstance()) {
          getServiceBindingBuilder().mergeFrom(value);
        } else {
          serviceBinding_ = value;
        }
      } else {
        serviceBindingBuilder_.mergeFrom(value);
      }
      // Only flag the field as set (and notify listeners) when a message is present.
      if (serviceBinding_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. Updated ServiceBinding resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.ServiceBinding service_binding = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder clearServiceBinding() {
      // Clear the has-bit, drop the stored message, and dispose of any nested builder
      // so the field reverts to its unset state.
      bitField0_ = (bitField0_ & ~0x00000002);
      serviceBinding_ = null;
      if (serviceBindingBuilder_ != null) {
        serviceBindingBuilder_.dispose();
        serviceBindingBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Updated ServiceBinding resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.ServiceBinding service_binding = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.networkservices.v1.ServiceBinding.Builder getServiceBindingBuilder() {
      // Handing out the mutable builder marks the field as set, since the caller
      // is expected to mutate it.
      bitField0_ |= 0x00000002;
      onChanged();
      return getServiceBindingFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. Updated ServiceBinding resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.ServiceBinding service_binding = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.networkservices.v1.ServiceBindingOrBuilder
        getServiceBindingOrBuilder() {
      if (serviceBindingBuilder_ != null) {
        return serviceBindingBuilder_.getMessageOrBuilder();
      } else {
        // Never return null: fall back to the default instance when unset.
        return serviceBinding_ == null
            ? com.google.cloud.networkservices.v1.ServiceBinding.getDefaultInstance()
            : serviceBinding_;
      }
    }
/**
*
*
* <pre>
* Required. Updated ServiceBinding resource.
* </pre>
*
* <code>
* .google.cloud.networkservices.v1.ServiceBinding service_binding = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.networkservices.v1.ServiceBinding,
            com.google.cloud.networkservices.v1.ServiceBinding.Builder,
            com.google.cloud.networkservices.v1.ServiceBindingOrBuilder>
        getServiceBindingFieldBuilder() {
      // Lazily create the nested builder; once created it owns the field's state,
      // so the plain reference is nulled out to avoid divergent copies.
      if (serviceBindingBuilder_ == null) {
        serviceBindingBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.networkservices.v1.ServiceBinding,
                com.google.cloud.networkservices.v1.ServiceBinding.Builder,
                com.google.cloud.networkservices.v1.ServiceBindingOrBuilder>(
                getServiceBinding(), getParentForChildren(), isClean());
        serviceBinding_ = null;
      }
      return serviceBindingBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Delegate to the generated superclass implementation.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Delegate to the generated superclass implementation.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.networkservices.v1.UpdateServiceBindingRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.networkservices.v1.UpdateServiceBindingRequest)
  // Singleton default (all-fields-unset) instance, created at class-initialization time.
  private static final com.google.cloud.networkservices.v1.UpdateServiceBindingRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.networkservices.v1.UpdateServiceBindingRequest();
  }
  public static com.google.cloud.networkservices.v1.UpdateServiceBindingRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; on failure it attaches the partially-parsed message to the
  // thrown exception so callers can inspect what was read before the error.
  private static final com.google.protobuf.Parser<UpdateServiceBindingRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateServiceBindingRequest>() {
        @java.lang.Override
        public UpdateServiceBindingRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateServiceBindingRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateServiceBindingRequest> getParserForType() {
    // All instances share the single static parser.
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.networkservices.v1.UpdateServiceBindingRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/geode | 38,255 | geode-cq/src/distributedTest/java/org/apache/geode/internal/cache/tier/sockets/DurableClientSimpleDUnitTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tier.sockets;
import static java.lang.Thread.sleep;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.MINUTES;
import static org.apache.geode.cache.InterestResultPolicy.NONE;
import static org.apache.geode.cache.Region.SEPARATOR;
import static org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil.TYPE_CREATE;
import static org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil.createCacheClient;
import static org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil.createCacheClientFromXmlN;
import static org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil.createCacheClients;
import static org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil.createCacheServer;
import static org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil.createCacheServerFromXmlN;
import static org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil.createClientCache;
import static org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil.getCache;
import static org.apache.geode.internal.cache.tier.sockets.CacheServerTestUtil.getClientCache;
import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import static org.apache.geode.test.dunit.NetworkUtils.getServerHostName;
import static org.apache.geode.test.dunit.Wait.pause;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.CacheException;
import org.apache.geode.cache.ClientSession;
import org.apache.geode.cache.InterestResultPolicy;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.client.Pool;
import org.apache.geode.cache.client.PoolManager;
import org.apache.geode.cache.client.internal.PoolImpl;
import org.apache.geode.cache30.CacheSerializableRunnable;
import org.apache.geode.distributed.internal.DistributionConfig;
import org.apache.geode.internal.cache.ha.HARegionQueue;
import org.apache.geode.internal.cache.ha.HARegionQueueStats;
import org.apache.geode.test.dunit.SerializableRunnableIF;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.junit.categories.ClientSubscriptionTest;
@Category({ClientSubscriptionTest.class})
public class DurableClientSimpleDUnitTest extends DurableClientTestBase {
/**
* Test that a durable client correctly receives updates.
*/
@Test
public void testSimpleDurableClientUpdate() {
// Start a server
server1Port = server1VM
.invoke(() -> createCacheServer(regionName, true));
// Start a durable client that is not kept alive on the server when it stops
// normally
final String durableClientId = getName() + "_client";
durableClientVM.invoke(() -> createCacheClient(
getClientPool(getServerHostName(), server1Port, true), regionName,
getClientDistributedSystemProperties(durableClientId), true));
// Send clientReady message
sendClientReady(durableClientVM);
registerInterest(durableClientVM, regionName, true, InterestResultPolicy.NONE);
// Start normal publisher client
publisherClientVM.invoke(() -> createCacheClient(
getClientPool(getServerHostName(), server1Port, false),
regionName));
// Publish some entries
final int numberOfEntries = 10;
publishEntries(publisherClientVM, 0, 10);
// Verify the durable client received the updates
checkListenerEvents(numberOfEntries, 1, -1, durableClientVM);
// Stop the durable client
durableClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
// Stop the publisher client
publisherClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
// Stop the server
server1VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
}
  /**
   * Test that a durable client VM with multiple BridgeClients correctly registers on the server.
   */
  @Test
  public void testMultipleBridgeClientsInSingleDurableVM() {
    // Start a server
    server1Port = server1VM.invoke(() -> createCacheServer(regionName, true));
    // Start a durable client with 2 regions (and 2 BridgeClients) that is not
    // kept alive on the server when it stops normally
    final String durableClientId = getName() + "_client";
    final String regionName1 = regionName + "1";
    final String regionName2 = regionName + "2";
    durableClientVM.invoke(() -> createCacheClients(
        getClientPool(getServerHostName(), server1Port, true), regionName1,
        regionName2, getClientDistributedSystemProperties(durableClientId)));
    // Send clientReady message. Two regions means two pools must exist on the client.
    durableClientVM.invoke("Send clientReady", new CacheSerializableRunnable() {
      @Override
      public void run2() throws CacheException {
        assertThat(PoolManager.getAll()).hasSize(2);
        getClientCache().readyForEvents();
      }
    });
    // Verify durable clients on server: both proxies share the durable id but must
    // be backed by distinct HA region queues.
    server1VM.invoke("Verify durable client", new CacheSerializableRunnable() {
      @Override
      public void run2() throws CacheException {
        // Get the CacheClientNotifier
        CacheClientNotifier notifier = getBridgeServer().getAcceptor().getCacheClientNotifier();
        // Iterate the CacheClientProxies
        checkNumberOfClientProxies(2);
        String firstProxyRegionName = null;
        for (CacheClientProxy proxy : notifier.getClientProxies()) {
          assertThat(proxy.isDurable()).isTrue();
          assertThat(proxy.getDurableId()).isEqualTo(durableClientId);
          assertThat(proxy.getDurableTimeout())
              .isEqualTo(DistributionConfig.DEFAULT_DURABLE_CLIENT_TIMEOUT);
          // Verify the two HA region names aren't the same
          if (firstProxyRegionName == null) {
            firstProxyRegionName = proxy.getHARegionName();
          } else {
            assertThat(proxy.getHARegionName()).isNotEqualTo(firstProxyRegionName);
          }
        }
      }
    });
    // Stop the durable client (normal close, so the server should drop both proxies)
    durableClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
    // Verify the durable client is no longer on the server
    server1VM.invoke("Verify durable client", new CacheSerializableRunnable() {
      @Override
      public void run2() throws CacheException {
        // Find the proxy
        checkNumberOfClientProxies(0);
      }
    });
    // Stop the server
    server1VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
  }
  /**
   * Test that the server correctly processes starting two durable clients.
   */
  @Test
  public void testSimpleTwoDurableClients() {
    // Start a server
    server1Port = server1VM
        .invoke(() -> createCacheServer(regionName, true));
    // Start a durable client that is not kept alive on the server when it
    // stops normally
    final String durableClientId = getName() + "_client";
    durableClientVM.invoke(() -> createCacheClient(
        getClientPool(getServerHostName(), server1Port, true), regionName,
        getClientDistributedSystemProperties(durableClientId)));
    // Send clientReady message
    sendClientReady(durableClientVM);
    // Start another durable client that is not kept alive on the server when
    // it stops normally. Use the 'publisherClientVM' as a durable client.
    VM durableClient2VM = publisherClientVM;
    final String durableClientId2 = getName() + "_client2";
    durableClient2VM.invoke(() -> createCacheClient(
        getClientPool(getServerHostName(), server1Port, true), regionName,
        getClientDistributedSystemProperties(durableClientId2)));
    // Send clientReady message
    sendClientReady(durableClient2VM);
    // Verify durable clients on server: exactly two proxies, one per durable id,
    // each with the default durable timeout.
    server1VM.invoke("Verify durable client", new CacheSerializableRunnable() {
      @Override
      public void run2() throws CacheException {
        // Get the CacheClientNotifier
        CacheClientNotifier notifier = getBridgeServer().getAcceptor().getCacheClientNotifier();
        // Iterate the CacheClientProxies and verify they are correct
        checkNumberOfClientProxies(2);
        boolean durableClient1Found = false, durableClient2Found = false;
        for (CacheClientProxy proxy : notifier.getClientProxies()) {
          assertThat(proxy.isDurable()).isTrue();
          if (proxy.getDurableId().equals(durableClientId)) {
            durableClient1Found = true;
          }
          if (proxy.getDurableId().equals(durableClientId2)) {
            durableClient2Found = true;
          }
          assertThat(proxy.getDurableTimeout())
              .isEqualTo(DistributionConfig.DEFAULT_DURABLE_CLIENT_TIMEOUT);
        }
        assertThat(durableClient1Found).isTrue();
        assertThat(durableClient2Found).isTrue();
      }
    });
    // Stop the durable clients
    durableClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
    durableClient2VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
    // Stop the server
    server1VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
  }
/**
* Test that updates to two durable clients are processed correctly.
*/
@Test
public void testTwoDurableClientsStartStopUpdate() throws InterruptedException {
// Start a server
server1Port = server1VM
.invoke(() -> createCacheServer(regionName, true));
// Start a durable client that is kept alive on the server when it stops
// normally
final String durableClientId = getName() + "_client";
final int durableClientTimeout = 60; // keep the client alive for 60 seconds
// final boolean durableClientKeepAlive = true; // keep the client alive when it stops normally
durableClientVM.invoke(() -> createCacheClient(
getClientPool(getServerHostName(), server1Port, true), regionName,
getClientDistributedSystemProperties(durableClientId, durableClientTimeout),
true));
// Send clientReady message
sendClientReady(durableClientVM);
registerInterest(durableClientVM, regionName, true, NONE);
// Start another durable client that is not kept alive on the server when
// it stops normally. Use the 'server2VM' as the second durable client.
VM durableClient2VM = server2VM;
final String durableClientId2 = getName() + "_client2";
durableClient2VM.invoke(() -> createCacheClient(
getClientPool(getServerHostName(), server1Port, true), regionName,
getClientDistributedSystemProperties(durableClientId2, durableClientTimeout),
true));
// Send clientReady message
sendClientReady(durableClient2VM);
registerInterest(durableClient2VM, regionName, true, NONE);
// Verify durable clients on server
verifyMultupleDurableClients(durableClientId, durableClientId2);
// Start normal publisher client
publisherClientVM.invoke(() -> createCacheClient(
getClientPool(getServerHostName(), server1Port, false),
regionName));
// Publish some entries
final int numberOfEntries = 10;
publishEntries(publisherClientVM, 0, numberOfEntries);
// Verify durable client 1 received the updates
checkListenerEvents(numberOfEntries, 1, -1, durableClientVM);
// Verify durable client 2 received the updates
checkListenerEvents(numberOfEntries, 1, -1, durableClient2VM);
// ARB: Wait for queue ack to arrive at server.
sleep(1000);
// Stop the durable clients
durableClientVM.invoke(() -> CacheServerTestUtil.closeCache(true));
durableClient2VM.invoke(() -> CacheServerTestUtil.closeCache(true));
// Verify the durable clients still exist on the server
verifyMultupleDurableClients(durableClientId, durableClientId2);
// Publish some more entries
publishEntries(publisherClientVM, 10, numberOfEntries);
sleep(1000);
// Verify the durable clients' queues contain the entries
server1VM.invoke("Verify durable client", new CacheSerializableRunnable() {
@Override
public void run2() throws CacheException {
// Get the CacheClientNotifier
CacheClientNotifier notifier = getBridgeServer().getAcceptor().getCacheClientNotifier();
// Iterate the CacheClientProxies and verify the queue sizes
checkNumberOfClientProxies(2);
for (CacheClientProxy proxy : notifier.getClientProxies()) {
assertThat(proxy.getQueueSize()).isEqualTo(numberOfEntries);
}
}
});
// Re-start durable client 1
durableClientVM.invoke(() -> createCacheClient(
getClientPool(getServerHostName(), server1Port, true), regionName,
getClientDistributedSystemProperties(durableClientId), true));
// Send clientReady message
sendClientReady(durableClientVM);
// Re-start durable client 2
durableClient2VM.invoke(() -> createCacheClient(
getClientPool(getServerHostName(), server1Port, true), regionName,
getClientDistributedSystemProperties(durableClientId2), true));
// Send clientReady message
sendClientReady(durableClient2VM);
// Verify durable client 1 received the updates held for it on the server
checkListenerEvents(numberOfEntries, 1, -1, durableClientVM);
// Verify durable client 2 received the updates held for it on the server
checkListenerEvents(numberOfEntries, 1, -1, durableClientVM);
// Stop durable client 1
durableClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
// Stop durable client 2
durableClient2VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
// Stop the publisher client
publisherClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
// Stop the server
server1VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
}
private void verifyMultupleDurableClients(String durableClientId,
String durableClientId2) {
server1VM.invoke("Verify durable client", new CacheSerializableRunnable() {
@Override
public void run2() throws CacheException {
// Get the CacheClientNotifier
CacheClientNotifier notifier = getBridgeServer().getAcceptor().getCacheClientNotifier();
// Iterate the CacheClientProxies and verify they are correct
checkNumberOfClientProxies(2);
boolean durableClient1Found = false, durableClient2Found = false;
for (CacheClientProxy proxy : notifier.getClientProxies()) {
assertThat(proxy.isDurable()).isTrue();
if (proxy.getDurableId().equals(durableClientId)) {
durableClient1Found = true;
}
if (proxy.getDurableId().equals(durableClientId2)) {
durableClient2Found = true;
}
assertThat(60).isEqualTo(proxy.getDurableTimeout());
}
assertThat(durableClient1Found).isTrue();
assertThat(durableClient2Found).isTrue();
}
});
}
/**
* Tests whether a durable client reconnects properly to two servers.
*/
@Test
public void testDurableClientReconnectTwoServers() throws InterruptedException {
// Start server 1
server1Port = server1VM.invoke(
() -> createCacheServer(regionName, true));
// on test flag for periodic ack
server1VM
.invoke(() -> setTestFlagToVerifyActForMarker(true));
// Start server 2 using the same mcast port as server 1
final int server2Port = server2VM
.invoke(() -> createCacheServer(regionName, true));
// Stop server 2
server2VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
// Start a durable client that is kept alive on the server when it stops
// normally
final String durableClientId = getName() + "_client";
final int durableClientTimeout = 60; // keep the client alive for 60 seconds
// final boolean durableClientKeepAlive = true; // keep the client alive when it stops normally
durableClientVM.invoke(() -> createCacheClient(
getClientPool(getServerHostName(), server1Port, server2Port, true),
regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout),
true));
// Send clientReady message
sendClientReady(durableClientVM);
registerInterest(durableClientVM, regionName, true, NONE);
// Verify durable client on server 1
server1VM.invoke("Verify durable client", new CacheSerializableRunnable() {
@Override
public void run2() throws CacheException {
// Find the proxy
checkNumberOfClientProxies(1);
CacheClientProxy proxy = getClientProxy();
assertThat(proxy).isNotNull();
// Verify that it is durable and its properties are correct
assertThat(proxy.isDurable()).isTrue();
assertThat(durableClientId).isEqualTo(proxy.getDurableId());
assertThat(durableClientTimeout).isEqualTo(proxy.getDurableTimeout());
verifyReceivedMarkerAck();
}
});
// VJR: wait for ack to go out
pause(5000);
// Stop the durable client
durableClientVM.invoke(() -> CacheServerTestUtil.closeCache(true));
// Verify durable client on server 1
server1VM.invoke("Verify durable client", new CacheSerializableRunnable() {
@Override
public void run2() throws CacheException {
// Find the proxy
checkNumberOfClientProxies(1);
CacheClientProxy proxy = getClientProxy();
assertThat(proxy).isNotNull();
}
});
// Re-start server2
server2VM.invoke(() -> createCacheServer(regionName, true,
server2Port));
// Start normal publisher client
publisherClientVM.invoke(() -> createCacheClient(getClientPool(getServerHostName(),
server1Port, server2Port, false), regionName));
// Publish some entries
final int numberOfEntries = 10;
publishEntries(publisherClientVM, 0, numberOfEntries);
sleep(1000);
// Verify the durable client's queue contains the entries
server1VM.invoke("Verify durable client", new CacheSerializableRunnable() {
@Override
public void run2() throws CacheException {
// Find the proxy
CacheClientProxy proxy = getClientProxy();
assertThat(proxy).isNotNull();
// Verify the queue size
assertThat(numberOfEntries).isEqualTo(proxy.getQueueSize());
}
});
// Re-start the durable client that is kept alive on the server when it stops
// normally
durableClientVM.invoke(() -> createCacheClient(
getClientPool(getServerHostName(), server1Port, server2Port, true),
regionName, getClientDistributedSystemProperties(durableClientId, durableClientTimeout),
true));
// Send clientReady message
sendClientReady(durableClientVM);
// Verify durable client on server 1
verifyDurableClientPresence(durableClientTimeout, durableClientId, server1VM, 1);
// Verify durable client on server 2
verifyDurableClientPresence(durableClientTimeout, durableClientId, server2VM, 1);
// Verify the HA region names are the same on both servers
String server1HARegionQueueName =
server1VM.invoke(DurableClientTestBase::getHARegionQueueName);
String server2HARegionQueueName =
server2VM.invoke(DurableClientTestBase::getHARegionQueueName);
assertThat(server1HARegionQueueName).isEqualTo(server2HARegionQueueName);
// Verify the durable client received the updates
checkListenerEvents(numberOfEntries, 1, -1, durableClientVM);
// Stop the durable client
durableClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
// Stop the publisher client
publisherClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
// off test flag for periodic ack
server1VM.invoke(() -> setTestFlagToVerifyActForMarker(false));
// Stop server 1
server1VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
// Stop server 2
server2VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
}
@Test
public void testReadyForEventsNotCalledImplicitly() {
// Start a server
server1Port = server1VM
.invoke(() -> createCacheServer(regionName, true));
// Start a durable client that is not kept alive on the server when it
// stops normally
final String durableClientId = getName() + "_client";
// make the client use ClientCacheFactory so it will have a default pool
durableClientVM.invoke(() -> createClientCache(
getClientPool(getServerHostName(), server1Port, true), regionName,
getClientDistributedSystemProperties(durableClientId)));
// verify that readyForEvents has not yet been called on the client's default pool
durableClientVM.invoke("check readyForEvents not called", new CacheSerializableRunnable() {
@Override
public void run2() throws CacheException {
for (Pool p : PoolManager.getAll().values()) {
assertThat(((PoolImpl) p).getReadyForEventsCalled()).isFalse();
}
}
});
// Send clientReady message
sendClientReady(durableClientVM);
// Verify durable clients on server
server1VM.invoke("Verify durable client", new CacheSerializableRunnable() {
@Override
public void run2() throws CacheException {
// Get the CacheClientNotifier
CacheClientNotifier notifier = getBridgeServer().getAcceptor().getCacheClientNotifier();
// Iterate the CacheClientProxies and verify they are correct
checkNumberOfClientProxies(1);
boolean durableClient1Found = false;
for (CacheClientProxy proxy : notifier.getClientProxies()) {
assertThat(proxy.isDurable()).isTrue();
if (proxy.getDurableId().equals(durableClientId)) {
durableClient1Found = true;
}
assertThat(DistributionConfig.DEFAULT_DURABLE_CLIENT_TIMEOUT)
.isEqualTo(proxy.getDurableTimeout());
}
assertThat(durableClient1Found).isTrue();
}
});
// Stop the durable clients
durableClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
// Stop the server
server1VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
}
  /**
   * Regression test: when both the server and the durable client are configured from
   * cache XML, registering interest must NOT implicitly invoke readyForEvents on the
   * client's pools — getReadyForEventsCalled() must remain false until the test sends
   * the clientReady message explicitly. Also verifies events queued while the durable
   * client is disconnected are delivered after it reconnects.
   */
  @Test
  public void testReadyForEventsNotCalledImplicitlyForRegisterInterestWithCacheXML() {
    regionName = "testReadyForEventsNotCalledImplicitlyWithCacheXML_region";
    // Start a server from the durablecq server cache XML
    server1Port =
        server1VM.invoke(() -> createCacheServerFromXmlN(
            DurableClientTestBase.class.getResource("durablecq-server-cache.xml")));
    // Start a durable client that is not kept alive on the server when it
    // stops normally
    final String durableClientId = getName() + "_client";
    // create client cache from xml (durable, default durable timeout)
    durableClientVM.invoke(() -> createCacheClientFromXmlN(
        DurableClientTestBase.class.getResource("durablecq-client-cache.xml"), "client",
        durableClientId, DistributionConfig.DEFAULT_DURABLE_CLIENT_TIMEOUT, true));
    // verify that readyForEvents has not yet been called on any of the client's
    // pools — this is the core assertion of the test
    durableClientVM.invoke("check readyForEvents not called", new CacheSerializableRunnable() {
      @Override
      public void run2() throws CacheException {
        for (Pool p : PoolManager.getAll().values()) {
          assertThat(((PoolImpl) p).getReadyForEventsCalled()).isFalse();
        }
      }
    });
    // Send clientReady message explicitly
    sendClientReady(durableClientVM);
    // Durable client registers durable interest on the server
    registerInterest(durableClientVM, regionName, true, InterestResultPolicy.KEYS_VALUES);
    // Verify durable client on server1
    verifyDurableClientPresent(DistributionConfig.DEFAULT_DURABLE_CLIENT_TIMEOUT, durableClientId,
        server1VM);
    // Start a normal (non-durable) publisher client
    publisherClientVM.invoke(() -> createCacheClient(
        getClientPool(getServerHostName(), server1Port, false),
        regionName));
    // Publish some entries
    final int numberOfEntries = 10;
    publishEntries(publisherClientVM, 0, numberOfEntries);
    // Verify the durable client received the updates
    checkListenerEvents(numberOfEntries, 1, -1, durableClientVM);
    // Stop the durable client with keepAlive = true so the server retains its queue
    durableClientVM.invoke(() -> CacheServerTestUtil.closeCache(true));
    // Verify the durable client still exists on the server
    verifyDurableClientPresent(DistributionConfig.DEFAULT_DURABLE_CLIENT_TIMEOUT, durableClientId,
        server1VM);
    // Publish some more entries while the durable client is disconnected; these
    // should be queued on the server for delivery after reconnect
    publisherClientVM.invoke("Publish additional updates", new CacheSerializableRunnable() {
      @Override
      public void run2() throws CacheException {
        // Get the region
        Region<String, String> region = getCache().getRegion(regionName);
        assertThat(region).isNotNull();
        // Publish some entries
        for (int i = 0; i < numberOfEntries; i++) {
          String keyAndValue = String.valueOf(i);
          region.put(keyAndValue, keyAndValue + "lkj");
        }
      }
    });
    publisherClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
    // Re-start the durable client from the same XML
    durableClientVM.invoke(() -> createCacheClientFromXmlN(
        DurableClientTestBase.class.getResource("durablecq-client-cache.xml"), "client",
        durableClientId, DistributionConfig.DEFAULT_DURABLE_CLIENT_TIMEOUT, true));
    // Durable client re-registers durable interest on the server
    registerInterest(durableClientVM, regionName, true, InterestResultPolicy.KEYS_VALUES);
    // Send clientReady message
    sendClientReady(durableClientVM);
    // Verify durable client on server
    verifyDurableClientPresent(DistributionConfig.DEFAULT_DURABLE_CLIENT_TIMEOUT, durableClientId,
        server1VM);
    // Verify the durable client received the updates held for it on the server
    checkListenerEvents(numberOfEntries, 1, -1, durableClientVM);
    // Stop the durable client
    durableClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
    // Stop the server
    server1VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
  }
  /**
   * Test functionality to close the durable client proxy and drain all events from
   * the HA queue on the server. After the proxy is closed the client's durable CQs
   * are gone, and freshly re-registered CQs start with empty queues (no replay of
   * the drained events).
   */
  @Test
  public void testCloseCacheProxy() {
    String greaterThan5Query = "select * from " + SEPARATOR + regionName + " p where p.ID > 5";
    String allQuery = "select * from " + SEPARATOR + regionName + " p where p.ID > -1";
    String lessThan5Query = "select * from " + SEPARATOR + regionName + " p where p.ID < 5";
    // Start a server
    server1Port = server1VM
        .invoke(() -> createCacheServer(regionName, true));
    // Start a durable client that is kept alive on the server when it stops
    // normally
    durableClientId = getName() + "_client";
    startDurableClient(durableClientVM, durableClientId, server1Port, regionName);
    // register durable cqs
    createCq(durableClientVM, "GreaterThan5", greaterThan5Query, true);
    createCq(durableClientVM, "All", allQuery, true);
    createCq(durableClientVM, "LessThan5", lessThan5Query, true);
    // send client ready
    sendClientReady(durableClientVM);
    // Verify durable client on server
    verifyDurableClientPresent(DistributionConfig.DEFAULT_DURABLE_CLIENT_TIMEOUT, durableClientId,
        server1VM);
    // Stop the durable client (keepAlive = true, queue retained on server)
    disconnectDurableClient(true);
    // Start normal publisher client
    startClient(publisherClientVM, server1Port, regionName);
    // Publish some entries
    publishEntries(publisherClientVM, regionName, 10);
    // verify cq stats are correct — presumably IDs 0..9 are published, so 10 match
    // "All", 4 match "GreaterThan5" (6..9) and 5 match "LessThan5" (0..4); ID 5
    // matches neither of the strict comparisons
    checkNumDurableCqs(server1VM, durableClientId, 3);
    checkCqStatOnServer(server1VM, durableClientId, "All", 10);
    checkCqStatOnServer(server1VM, durableClientId, "GreaterThan5", 4);
    checkCqStatOnServer(server1VM, durableClientId, "LessThan5", 5);
    // drop the client proxy on the server, discarding its queue and durable CQs
    server1VM.invoke("Close client proxy on server for client" + durableClientId,
        new CacheSerializableRunnable() {
          @Override
          public void run2() throws CacheException {
            final CacheClientNotifier ccnInstance = CacheClientNotifier.getInstance();
            ccnInstance.closeDurableClientProxy(durableClientId);
          }
        });
    // Restart the durable client
    startDurableClient(durableClientVM, durableClientId, server1Port, regionName);
    // check that cqs are no longer registered
    checkNumDurableCqs(server1VM, durableClientId, 0);
    // Reregister durable cqs
    createCq(durableClientVM, "GreaterThan5",
        "select * from " + SEPARATOR + regionName + " p where p.ID > 5",
        true);
    createCq(durableClientVM, "All",
        "select * from " + SEPARATOR + regionName + " p where p.ID > -1", true);
    createCq(durableClientVM, "LessThan5",
        "select * from " + SEPARATOR + regionName + " p where p.ID < 5",
        true);
    // Before sending client ready, lets make sure the stats already reflect 0 queued events
    checkCqStatOnServer(server1VM, durableClientId, "LessThan5", 0);
    checkCqStatOnServer(server1VM, durableClientId, "GreaterThan5", 0);
    checkCqStatOnServer(server1VM, durableClientId, "All", 0);
    // send client ready
    sendClientReady(durableClientVM);
    // verify cq events for all 3 cqs are 0 events (drained events are not replayed)
    checkCqListenerEvents(durableClientVM, "GreaterThan5", 0 /* numEventsExpected */,
        /* numEventsToWaitFor */ 5/* secondsToWait */);
    checkCqListenerEvents(durableClientVM, "LessThan5", 0 /* numEventsExpected */,
        /* numEventsToWaitFor */ 5/* secondsToWait */);
    checkCqListenerEvents(durableClientVM, "All", 0 /* numEventsExpected */,
        /* numEventsToWaitFor */ 5/* secondsToWait */);
    // Stop the durable client
    durableClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
    // Stop the publisher client
    publisherClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
    // Stop the server
    server1VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
  }
  /**
   * Test that starting a durable client on multiple servers is processed correctly:
   * a durable client with a long timeout survives a keep-alive shutdown on both
   * servers, while the same client with the default timeout is removed from both
   * servers after a normal shutdown.
   */
  @Test
  public void testSimpleDurableClientMultipleServers() {
    // Start server 1
    server1Port = server1VM.invoke(
        () -> createCacheServer(regionName, true));
    // Start server 2
    final int server2Port = server2VM
        .invoke(() -> createCacheServer(regionName, true));
    // Start a durable client connected to both servers that is kept alive when
    // it stops normally
    durableClientId = getName() + "_client";
    durableClientVM.invoke(() -> createCacheClient(
        getClientPool(getServerHostName(), server1Port, server2Port, true),
        regionName,
        getClientDistributedSystemProperties(durableClientId, VERY_LONG_DURABLE_TIMEOUT_SECONDS),
        true));
    // Send clientReady message
    sendClientReady(durableClientVM);
    // Verify durable client on server 1
    verifyDurableClientPresence(VERY_LONG_DURABLE_TIMEOUT_SECONDS, durableClientId, server1VM, 1);
    // Verify durable client on server 2
    verifyDurableClientPresence(VERY_LONG_DURABLE_TIMEOUT_SECONDS, durableClientId, server2VM, 1);
    // Stop the durable client with keepAlive = true
    durableClientVM.invoke(() -> CacheServerTestUtil.closeCache(true));
    // Verify the durable client is still on server 1
    verifyDurableClientPresence(VERY_LONG_DURABLE_TIMEOUT_SECONDS, durableClientId, server1VM, 1);
    // Verify the durable client is still on server 2
    verifyDurableClientPresence(VERY_LONG_DURABLE_TIMEOUT_SECONDS, durableClientId, server2VM, 1);
    // Start up the client again. This time initialize it so that it is not kept
    // alive on the servers when it stops normally (default durable timeout).
    durableClientVM.invoke(() -> createCacheClient(
        getClientPool(getServerHostName(), server1Port, server2Port, true),
        regionName, getClientDistributedSystemProperties(durableClientId), true));
    // Send clientReady message
    sendClientReady(durableClientVM);
    // Verify durable client on server1
    verifyDurableClientPresence(DistributionConfig.DEFAULT_DURABLE_CLIENT_TIMEOUT, durableClientId,
        server1VM, 1);
    // Verify durable client on server2
    verifyDurableClientPresence(DistributionConfig.DEFAULT_DURABLE_CLIENT_TIMEOUT, durableClientId,
        server2VM, 1);
    // Stop the durable client normally; expect zero proxies left on both servers
    durableClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
    verifyDurableClientPresence(DistributionConfig.DEFAULT_DURABLE_CLIENT_TIMEOUT,
        durableClientId, server1VM, 0);
    verifyDurableClientPresence(DistributionConfig.DEFAULT_DURABLE_CLIENT_TIMEOUT,
        durableClientId, server2VM, 0);
    // Stop server 1
    server1VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
    // Stop server 2
    server2VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
  }
  /**
   * Verifies that interest registered server-side via ClientSession delivers the
   * initial (existing) entry value to the durable client as a create event, and that
   * after a keep-alive disconnect and reconnect the same create event is NOT
   * delivered a second time.
   */
  @Test
  public void testDurableClientReceivedClientSessionInitialValue() {
    // Start server 1
    server1Port = server1VM
        .invoke(() -> createCacheServer(regionName, true));
    // Start server 2
    int server2Port = server2VM
        .invoke(() -> createCacheServer(regionName, true));
    // Start normal publisher client
    publisherClientVM.invoke(() -> createCacheClient(getClientPool(getServerHostName(),
        server1Port, server2Port, false), regionName));
    // Create an entry before the durable client registers interest
    publishEntries(publisherClientVM, 0, 1);
    // Start a durable client with the ControlListener
    durableClientId = getName() + "_client";
    durableClientVM.invoke(() -> createCacheClient(
        getClientPool(getServerHostName(), server1Port, server2Port, true), regionName,
        getClientDistributedSystemProperties(durableClientId, VERY_LONG_DURABLE_TIMEOUT_SECONDS),
        true));
    // Wait until the client cache exists before proceeding.
    // NOTE(review): HEAVY_TEST_LOAD_DELAY_SUPPORT_MULTIPLIER is passed as the
    // atMost() amount of MINUTES — confirm this multiplier is the intended timeout.
    durableClientVM.invoke(() -> {
      await().atMost(HEAVY_TEST_LOAD_DELAY_SUPPORT_MULTIPLIER, MINUTES)
          .pollInterval(100, MILLISECONDS)
          .untilAsserted(() -> assertThat(getCache()).isNotNull());
    });
    // Send clientReady message
    sendClientReady(durableClientVM);
    // Use ClientSession on the server to register interest in the entry key on
    // behalf of the durable client; only the primary performs the registration,
    // so try server 1 first and fall back to server 2
    boolean server1IsPrimary = false;
    boolean registered = server1VM.invoke(() -> DurableClientSimpleDUnitTest
        .registerInterestWithClientSession(durableClientId, regionName, String.valueOf(0)));
    if (registered) {
      server1IsPrimary = true;
    } else {
      registered = server2VM.invoke(() -> DurableClientSimpleDUnitTest
          .registerInterestWithClientSession(durableClientId, regionName, String.valueOf(0)));
    }
    assertThat(registered)
        .describedAs("ClientSession interest registration failed to occur in either server.")
        .isTrue();
    // Verify durable client received the initial create event
    checkListenerEvents(1, 1, TYPE_CREATE, durableClientVM);
    // Wait for QRM to be processed on the secondary
    waitForEventsRemovedByQueueRemovalMessage(server1IsPrimary ? server2VM : server1VM,
        durableClientId);
    // Stop durable client (keepAlive = true)
    disconnectDurableClient(true);
    // restart durable client
    restartDurableClient(VERY_LONG_DURABLE_TIMEOUT_SECONDS,
        getClientPool(getServerHostName(), server1Port, server2Port, true), true);
    // Send clientReady message
    sendClientReady(durableClientVM);
    // Verify durable client does not receive the create event again
    checkListenerEvents(0, 1, TYPE_CREATE, durableClientVM);
    // Stop the durable client
    durableClientVM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
    // Stop server 1
    server1VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
    // Stop server 2
    server2VM.invoke((SerializableRunnableIF) CacheServerTestUtil::closeCache);
  }
private static boolean registerInterestWithClientSession(String durableClientId,
String regionName,
Object keyOfInterest) {
ClientSession session = getBridgeServer().getClientSession(durableClientId);
boolean registered = false;
if (session.isPrimary()) {
session.registerInterest(regionName, keyOfInterest, InterestResultPolicy.KEYS_VALUES, true,
true);
registered = true;
}
return registered;
}
private void waitForEventsRemovedByQueueRemovalMessage(VM secondaryServerVM,
final String durableClientId) {
secondaryServerVM.invoke(() -> DurableClientSimpleDUnitTest
.waitForEventsRemovedByQueueRemovalMessage(durableClientId));
}
private static void waitForEventsRemovedByQueueRemovalMessage(String durableClientId) {
CacheClientNotifier ccn = CacheClientNotifier.getInstance();
CacheClientProxy ccp = ccn.getClientProxy(durableClientId);
HARegionQueue haRegionQueue = ccp.getHARegionQueue();
HARegionQueueStats haRegionQueueStats = haRegionQueue.getStatistics();
await()
.untilAsserted(
() -> assertThat(haRegionQueueStats.getEventsRemovedByQrm()).describedAs(
"Expected queue removal messages: " + 2 + " but actual messages: "
+ haRegionQueueStats.getEventsRemovedByQrm())
.isEqualTo(2));
}
}
|
apache/storm | 37,986 | storm-client/test/jvm/org/apache/storm/TestConfigValidate.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.security.auth.Subject;
import org.apache.storm.blobstore.BlobStore;
import org.apache.storm.blobstore.NimbusBlobStore;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.generated.KeyNotFoundException;
import org.apache.storm.security.auth.ReqContext;
import org.apache.storm.shade.com.google.common.collect.ImmutableList;
import org.apache.storm.shade.com.google.common.collect.ImmutableMap;
import org.apache.storm.utils.Utils;
import org.apache.storm.validation.ConfigValidation;
import org.apache.storm.validation.ConfigValidation.ImpersonationAclUserEntryValidator;
import org.apache.storm.validation.ConfigValidation.IntegerValidator;
import org.apache.storm.validation.ConfigValidation.KryoRegValidator;
import org.apache.storm.validation.ConfigValidation.LongValidator;
import org.apache.storm.validation.ConfigValidation.ListEntryTypeValidator;
import org.apache.storm.validation.ConfigValidation.ListOfListOfStringValidator;
import org.apache.storm.validation.ConfigValidation.NoDuplicateInListValidator;
import org.apache.storm.validation.ConfigValidation.NotNullValidator;
import org.apache.storm.validation.ConfigValidation.PositiveNumberValidator;
import org.apache.storm.validation.ConfigValidation.PowerOf2Validator;
import org.apache.storm.validation.ConfigValidation.RasConstraintsTypeValidator;
import org.apache.storm.validation.ConfigValidation.StringValidator;
import org.apache.storm.validation.ConfigValidation.UserResourcePoolEntryValidator;
import org.apache.storm.validation.ConfigValidationAnnotations.IsExactlyOneOf;
import org.apache.storm.validation.ConfigValidationAnnotations.IsImplementationOfClass;
import org.apache.storm.validation.ConfigValidationAnnotations.IsListEntryCustom;
import org.apache.storm.validation.ConfigValidationAnnotations.IsListEntryType;
import org.apache.storm.validation.ConfigValidationAnnotations.IsMapEntryCustom;
import org.apache.storm.validation.ConfigValidationAnnotations.IsMapEntryType;
import org.apache.storm.validation.ConfigValidationAnnotations.IsNoDuplicateInList;
import org.apache.storm.validation.ConfigValidationAnnotations.IsString;
import org.apache.storm.validation.ConfigValidationAnnotations.NotNull;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TestConfigValidate {
@Test
public void validPacemakerAuthTest() {
Map<String, Object> conf = new HashMap<>();
conf.put(Config.PACEMAKER_AUTH_METHOD, "NONE");
ConfigValidation.validateFields(conf);
conf.put(Config.PACEMAKER_AUTH_METHOD, "DIGEST");
ConfigValidation.validateFields(conf);
conf.put(Config.PACEMAKER_AUTH_METHOD, "KERBEROS");
ConfigValidation.validateFields(conf);
}
@Test
public void invalidPacemakerAuthTest() {
Map<String, Object> conf = new HashMap<>();
conf.put(Config.PACEMAKER_AUTH_METHOD, "invalid");
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(conf));
}
@Test
public void validConfigTest() {
Map<String, Object> conf = new HashMap<>();
conf.put(Config.STORM_MESSAGING_NETTY_SOCKET_BACKLOG, 5);
conf.put(Config.STORM_MESSAGING_NETTY_MIN_SLEEP_MS, 500);
conf.put(Config.STORM_MESSAGING_NETTY_AUTHENTICATION, true);
ConfigValidation.validateFields(conf);
}
@Test
public void invalidConfigTest() {
Map<String, Object> conf = new HashMap<>();
conf.put(Config.STORM_MESSAGING_NETTY_SOCKET_BACKLOG, 5);
conf.put(Config.STORM_MESSAGING_NETTY_MIN_SLEEP_MS, 500);
conf.put(Config.STORM_MESSAGING_NETTY_AUTHENTICATION, "invalid");
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(conf));
}
@Test
public void testValidateTopologyBlobStoreMapWithBlobStore() throws Throwable {
Map<String, Object> topoConf = new HashMap<>();
Map<String, Map<String, String>> topologyMap = new HashMap<>();
topologyMap.put("key1", new HashMap<>());
topologyMap.put("key2", new HashMap<>());
topoConf.put(Config.TOPOLOGY_BLOBSTORE_MAP, topologyMap);
Subject subject = ReqContext.context().subject();
BlobStore blobStoreMock = mock(BlobStore.class);
when(blobStoreMock.getBlobMeta("key1", subject)).thenReturn(null);
when(blobStoreMock.getBlobMeta("key2", subject)).thenThrow(new KeyNotFoundException());
assertThrows(InvalidTopologyException.class,
() -> Utils.validateTopologyBlobStoreMap(topoConf, blobStoreMock));
}
@Test
public void testValidateTopologyBlobStoreMissingKey() throws Throwable {
Map<String, Object> topoConf = new HashMap<>();
Map<String, Map<String, String>> topologyMap = new HashMap<>();
topologyMap.put("key1", new HashMap<>());
topologyMap.put("key2", new HashMap<>());
topoConf.put(Config.TOPOLOGY_BLOBSTORE_MAP, topologyMap);
NimbusBlobStore nimbusBlobStoreMock = mock(NimbusBlobStore.class);
when(nimbusBlobStoreMock.getBlobMeta("key1")).thenReturn(null);
when(nimbusBlobStoreMock.getBlobMeta("key2")).thenThrow(new KeyNotFoundException());
assertThrows(InvalidTopologyException.class,
() -> Utils.validateTopologyBlobStoreMap(topoConf, nimbusBlobStoreMock));
}
@Test
public void testValidateTopologyBlobStoreMap() throws InvalidTopologyException, AuthorizationException,
KeyNotFoundException {
Map<String, Object> topoConf = new HashMap<>();
Map<String, Map<String, Object>> topologyMap = new HashMap<>();
Map<String, Object> blobConf = new HashMap<>();
blobConf.put("uncompress", false);
topologyMap.put("key1", blobConf);
topologyMap.put("key2", blobConf);
topoConf.put(Config.TOPOLOGY_BLOBSTORE_MAP, topologyMap);
NimbusBlobStore nimbusBlobStoreMock = mock(NimbusBlobStore.class);
when(nimbusBlobStoreMock.getBlobMeta("key1")).thenReturn(null);
when(nimbusBlobStoreMock.getBlobMeta("key2")).thenReturn(null);
Utils.validateTopologyBlobStoreMap(topoConf, nimbusBlobStoreMock);
}
@Test
public void testValidateTopologyBlobStoreMapInvalidOption() {
Map<String, Object> topoConf = new HashMap<>();
Map<String, Map<String, Object>> topologyMap = new HashMap<>();
Map<String, Object> blobConf = new HashMap<>();
blobConf.put("uncompress", "false");
topologyMap.put("key1", blobConf);
topologyMap.put("key2", blobConf);
topoConf.put(Config.TOPOLOGY_BLOBSTORE_MAP, topologyMap);
NimbusBlobStore nimbusBlobStoreMock = mock(NimbusBlobStore.class);
assertThrows(InvalidTopologyException.class,
() -> Utils.validateTopologyBlobStoreMap(topoConf, nimbusBlobStoreMock));
}
@Test
public void defaultYamlTest() {
Map<String, Object> conf = Utils.readStormConfig();
ConfigValidation.validateFields(conf);
}
@Test
public void testTopologyWorkersIsInteger() {
Map<String, Object> conf = new HashMap<>();
conf.put(Config.TOPOLOGY_WORKERS, 42);
ConfigValidation.validateFields(conf);
conf.put(Config.TOPOLOGY_WORKERS, 3.14159);
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(conf));
}
@Test
public void testTopologyStatsSampleRateIsFloat() {
Map<String, Object> conf = new HashMap<>();
conf.put(Config.TOPOLOGY_STATS_SAMPLE_RATE, 0.5);
ConfigValidation.validateFields(conf);
conf.put(Config.TOPOLOGY_STATS_SAMPLE_RATE, 10);
ConfigValidation.validateFields(conf);
conf.put(Config.TOPOLOGY_STATS_SAMPLE_RATE, Double.MAX_VALUE);
ConfigValidation.validateFields(conf);
}
@Test
public void testWorkerChildoptsIsStringOrStringList() {
Map<String, Object> conf = new HashMap<>();
Collection<Object> passCases = new LinkedList<>();
Collection<Object> failCases = new LinkedList<>();
passCases.add(null);
passCases.add("some string");
String[] stuff = { "some", "string", "list" };
passCases.add(Arrays.asList(stuff));
failCases.add(42);
Integer[] wrongStuff = { 1, 2, 3 };
failCases.add(Arrays.asList(wrongStuff));
//worker.childopts validates
for (Object value : passCases) {
conf.put(Config.WORKER_CHILDOPTS, value);
ConfigValidation.validateFields(conf);
}
for (Object value : failCases) {
conf.put(Config.WORKER_CHILDOPTS, value);
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(conf));
}
//topology.worker.childopts validates
conf.clear();
for (Object value : passCases) {
conf.put(Config.TOPOLOGY_WORKER_CHILDOPTS, value);
ConfigValidation.validateFields(conf);
}
for (Object value : failCases) {
conf.put(Config.TOPOLOGY_WORKER_CHILDOPTS, value);
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(conf));
}
}
@Test
public void testValidity() {
Map<String, Object> conf = new HashMap<>();
conf.put(Config.TOPOLOGY_DEBUG, true);
conf.put("q", "asasdasd");
conf.put("aaa", Integer.valueOf("123"));
conf.put("bbb", Long.valueOf("456"));
List<Object> testList = new ArrayList<>();
testList.add(1);
testList.add(2);
testList.add(Integer.valueOf("3"));
testList.add(Long.valueOf("4"));
testList.add(new Float("3"));
testList.add(new Double("4"));
testList.add(ImmutableList.of("asdf", 3));
conf.put("eee", testList);
assertTrue(Utils.isValidConf(conf));
}
@Test
public void testNonValidConfigChar() {
Map<String, Object> conf = new HashMap<>();
conf.put("q", ImmutableList.of("asdf", 'c'));
assertFalse(Utils.isValidConf(conf));
}
@Test
public void testNonValidConfigRandomObject() {
Map<String, Object> conf = new HashMap<>();
conf.put("q", ImmutableList.of("asdf", new TestConfigValidate()));
assertFalse(Utils.isValidConf(conf));
}
@Test
public void testKryoRegValidator() {
KryoRegValidator validator = new KryoRegValidator();
// fail cases
Object[] failCases = { ImmutableMap.of("f", "g"), ImmutableList.of(1),
Collections.singletonList(ImmutableMap.of("a", 1))};
for (Object value : failCases) {
assertThrows(IllegalArgumentException.class, () -> validator.validateField("test", value));
}
// pass cases
validator.validateField("test", Arrays.asList("a", "b", "c", ImmutableMap.of("d", "e"), ImmutableMap.of("f", "g")));
}
@Test
public void testPowerOf2Validator() {
PowerOf2Validator validator = new PowerOf2Validator();
Object[] failCases = { 42.42, 42, -33, 23423423423.0, -32, -1, -0.00001, 0, -0, "Forty-two" };
for (Object value : failCases) {
assertThrows(IllegalArgumentException.class, () -> validator.validateField("test", value));
}
Object[] passCases = { 64, 4294967296.0, 1, null };
for (Object value : passCases) {
validator.validateField("test", value);
}
}
@Test
public void testPositiveNumberValidator() {
PositiveNumberValidator validator = new PositiveNumberValidator();
Object[] passCases = { null, 1.0, 0.01, 1, 2147483647, 42 };
for (Object value : passCases) {
validator.validateField("test", value);
}
Object[] failCases = { -1.0, -1, -0.01, 0.0, 0, "43", "string" };
for (Object value : failCases) {
assertThrows(IllegalArgumentException.class, () -> validator.validateField("test", value));
}
Object[] passCasesIncludeZero = { null, 1.0, 0.01, 0, 2147483647, 0.0 };
for (Object value : passCasesIncludeZero) {
validator.validateField("test", true, value);
}
Object[] failCasesIncludeZero = { -1.0, -1, -0.01, "43", "string" };
for (Object value : failCasesIncludeZero) {
assertThrows(IllegalArgumentException.class, () -> validator.validateField("test", true, value));
}
}
@Test
public void testIntegerValidator() {
IntegerValidator validator = new IntegerValidator();
Object[] passCases = { null, 1000, Integer.MAX_VALUE };
for (Object value : passCases) {
validator.validateField("test", value);
}
Object[] failCases = { 1.34, (long) Integer.MAX_VALUE + 1 };
for (Object value : failCases) {
assertThrows(IllegalArgumentException.class, () -> validator.validateField("test", value));
}
}
@Test
public void testLongValidator() {
LongValidator validator = new LongValidator();
Object[] passCases = { null, 1000, Integer.MAX_VALUE, Long.MAX_VALUE };
for (Object value : passCases) {
validator.validateField("test", value);
}
Object[] failCases = { 1.34, BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.valueOf(1L))};
for (Object value : failCases) {
assertThrows(IllegalArgumentException.class, () -> validator.validateField("test", value));
}
}
@Test
public void NoDuplicateInListValidator() {
NoDuplicateInListValidator validator = new NoDuplicateInListValidator();
Collection<Object> passCases = new LinkedList<>();
Collection<Object> failCases = new LinkedList<>();
Object[] passCase1 = { 1000, 0, -1000 };
Object[] passCase2 = { "one", "two", "three" };
Object[] passCase3 = { false, true };
Object[] passCase4 = { false, true, 1000, 0, -1000, "one", "two", "three" };
Object[] passCase5 = { 1000.0, 0.0, -1000.0 };
passCases.add(Arrays.asList(passCase1));
passCases.add(Arrays.asList(passCase2));
passCases.add(Arrays.asList(passCase3));
passCases.add(Arrays.asList(passCase4));
passCases.add(Arrays.asList(passCase5));
passCases.add(null);
for (Object value : passCases) {
validator.validateField("test", value);
}
Object[] failCase1 = { 1000, 0, 1000 };
Object[] failCase2 = { "one", "one", "two" };
Object[] failCase3 = { 5.0, 5.0, 6 };
failCases.add(Arrays.asList(failCase1));
failCases.add(Arrays.asList(failCase2));
failCases.add(Arrays.asList(failCase3));
for (Object value : failCases) {
assertThrows(IllegalArgumentException.class, () -> validator.validateField("test", value));
}
}
@Test
public void testListEntryTypeValidator() {
Collection<Object> testCases1 = new LinkedList<>();
Collection<Object> testCases2 = new LinkedList<>();
Collection<Object> testCases3 = new LinkedList<>();
Object[] testCase1 = { "one", "two", "three" };
Object[] testCase2 = { "three" };
testCases1.add(Arrays.asList(testCase1));
testCases1.add(Arrays.asList(testCase2));
for (Object value : testCases1) {
ListEntryTypeValidator.validateField("test", String.class, value);
}
for (Object value : testCases1) {
assertThrows(IllegalArgumentException.class, () -> ListEntryTypeValidator.validateField("test", Number.class, value));
}
Object[] testCase3 = { 1000, 0, 1000 };
Object[] testCase4 = { 5 };
Object[] testCase5 = { 5.0, 5.0, 6 };
testCases2.add(Arrays.asList(testCase3));
testCases2.add(Arrays.asList(testCase4));
testCases2.add(Arrays.asList(testCase5));
for (Object value : testCases2) {
assertThrows(IllegalArgumentException.class, () -> ListEntryTypeValidator.validateField("test", String.class, value));
}
for (Object value : testCases2) {
ListEntryTypeValidator.validateField("test", Number.class, value);
}
Object[] testCase6 = { 1000, 0, 1000, "5" };
Object[] testCase7 = { "4", "5", 5 };
testCases3.add(Arrays.asList(testCase6));
testCases3.add(Arrays.asList(testCase7));
for (Object value : testCases3) {
assertThrows(IllegalArgumentException.class, () -> ListEntryTypeValidator.validateField("test", String.class, value));
}
for (Object value : testCases1) {
assertThrows(IllegalArgumentException.class, () -> ListEntryTypeValidator.validateField("test", Number.class, value));
}
}
@Test
public void testMapEntryTypeAnnotation() {
TestConfig config = new TestConfig();
Collection<Object> passCases = new LinkedList<>();
Collection<Object> failCases = new LinkedList<>();
Map<Object, Object> passCase1 = new HashMap<>();
passCase1.put("aaa", 5);
passCase1.put("bbb", 6);
passCase1.put("ccc", 7);
passCases.add(passCase1);
passCases.add(null);
for (Object value : passCases) {
config.put(TestConfig.TEST_MAP_CONFIG, value);
ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class));
}
Map<Object, Object> failCase1 = new HashMap<>();
failCase1.put("aaa", 5);
failCase1.put(5, 6);
failCase1.put("ccc", 7);
Map<Object, Object> failCase2 = new HashMap<>();
failCase2.put("aaa", "str");
failCase2.put("bbb", 6);
failCase2.put("ccc", 7);
failCases.add(failCase1);
failCases.add(failCase2);
for (Object value : failCases) {
config.put(TestConfig.TEST_MAP_CONFIG, value);
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class)));
}
}
@Test
public void testMapEntryCustomAnnotation() {
TestConfig config = new TestConfig();
Collection<Object> passCases = new LinkedList<>();
Collection<Object> failCases = new LinkedList<>();
Map<Object, Object> passCase1 = new HashMap<>();
passCase1.put("aaa", 5);
passCase1.put("bbb", 100);
passCase1.put("ccc", Integer.MAX_VALUE);
passCases.add(passCase1);
passCases.add(null);
for (Object value : passCases) {
config.put(TestConfig.TEST_MAP_CONFIG_2, value);
ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class));
}
Map<Object, Object> failCase1 = new HashMap<>();
failCase1.put("aaa", 5);
failCase1.put(5, 6);
failCase1.put("ccc", 7);
Map<Object, Object> failCase2 = new HashMap<>();
failCase2.put("aaa", "str");
failCase2.put("bbb", 6);
failCase2.put("ccc", 7);
Map<Object, Object> failCase3 = new HashMap<>();
failCase3.put("aaa", -1);
failCase3.put("bbb", 6);
failCase3.put("ccc", 7);
Map<Object, Object> failCase4 = new HashMap<>();
failCase4.put("aaa", 1);
failCase4.put("bbb", 6);
failCase4.put("ccc", 7.4);
failCases.add(failCase1);
failCases.add(failCase2);
failCases.add(failCase3);
failCases.add(failCase4);
for (Object value : failCases) {
config.put(TestConfig.TEST_MAP_CONFIG_2, value);
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class)));
}
}
@Test
public void testExactlyOneOfCustomAnnotation() {
TestConfig config = new TestConfig();
Collection<Object> passCases = new LinkedList<>();
Collection<Object> failCases = new LinkedList<>();
List<Object> passCaseListOfList = new ArrayList<>();
passCaseListOfList.add(Arrays.asList("comp1", "comp2"));
passCaseListOfList.add(Arrays.asList("comp1", "comp3"));
passCaseListOfList.add(Arrays.asList("comp2", "comp4"));
passCaseListOfList.add(Arrays.asList("comp2", "comp5"));
Map<Object, Object> passCaseMapOfMap = new HashMap<>();
passCaseMapOfMap.put("comp1",
Stream.of(new Object[][] {
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_MAX_NODE_CO_LOCATION_CNT, 10 },
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_INCOMPATIBLE_COMPONENTS, Arrays.asList("comp2", "comp3")},
}).collect(Collectors.toMap(data -> data[0], data -> data[1]))
);
passCaseMapOfMap.put("comp2",
Stream.of(new Object[][] {
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_MAX_NODE_CO_LOCATION_CNT, 2 },
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_INCOMPATIBLE_COMPONENTS, Arrays.asList("comp4", "comp5")},
}).collect(Collectors.toMap(data -> data[0], data -> data[1]))
);
passCases.add(passCaseMapOfMap);
passCaseMapOfMap = new HashMap<>();
passCaseMapOfMap.put("comp1",
Stream.of(new Object[][] {
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_INCOMPATIBLE_COMPONENTS, Arrays.asList("comp2", "comp3")},
}).collect(Collectors.toMap(data -> data[0], data -> data[1]))
);
passCaseMapOfMap.put("comp2",
Stream.of(new Object[][] {
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_MAX_NODE_CO_LOCATION_CNT, 2 },
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_INCOMPATIBLE_COMPONENTS, Arrays.asList("comp4", "comp5")},
}).collect(Collectors.toMap(data -> data[0], data -> data[1]))
);
passCases.add(passCaseMapOfMap);
passCaseMapOfMap = new HashMap<>();
passCaseMapOfMap.put("comp1",
Stream.of(new Object[][] {
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_INCOMPATIBLE_COMPONENTS, "comp2"},
}).collect(Collectors.toMap(data -> data[0], data -> data[1]))
);
passCaseMapOfMap.put("comp2",
Stream.of(new Object[][] {
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_MAX_NODE_CO_LOCATION_CNT, 2 },
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_INCOMPATIBLE_COMPONENTS, "comp4"},
}).collect(Collectors.toMap(data -> data[0], data -> data[1]))
);
passCases.add(passCaseMapOfMap);
for (Object value : passCases) {
config.put(TestConfig.TEST_MAP_CONFIG_9, value);
ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class));
}
List<Object> failCaseList = new ArrayList<>();
failCaseList.add(Arrays.asList("comp1", Arrays.asList("comp2", "comp3")));
failCaseList.add(Arrays.asList("comp3", Arrays.asList("comp4", "comp5")));
failCases.add(failCaseList);
Map<String, Object> failCaseMapOfMap = new HashMap<>();
failCaseMapOfMap.put("comp1",
Stream.of(new Object[][] {
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_MAX_NODE_CO_LOCATION_CNT, 10 },
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_INCOMPATIBLE_COMPONENTS, Arrays.asList(1, 2, 3)},
}).collect(Collectors.toMap(data -> data[0], data -> data[1]))
);
failCaseMapOfMap.put("comp2",
Stream.of(new Object[][] {
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_MAX_NODE_CO_LOCATION_CNT, 2 },
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_INCOMPATIBLE_COMPONENTS, Arrays.asList("comp4", "comp5")},
}).collect(Collectors.toMap(data -> data[0], data -> data[1]))
);
failCases.add(failCaseMapOfMap);
failCaseMapOfMap = new HashMap<>();
failCaseMapOfMap.put("comp1",
Stream.of(new Object[][] {
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_MAX_NODE_CO_LOCATION_CNT, 10 },
{ RasConstraintsTypeValidator.CONSTRAINT_TYPE_INCOMPATIBLE_COMPONENTS, Arrays.asList("comp1", 3)},
}).collect(Collectors.toMap(data -> data[0], data -> data[1]))
);
failCases.add(failCaseMapOfMap);
failCaseMapOfMap = new HashMap<>();
failCaseMapOfMap.put("comp1", Arrays.asList("comp2", "comp3"));
failCaseMapOfMap.put("comp2", Arrays.asList("comp4", "comp5"));
failCases.add(failCaseMapOfMap);
failCaseMapOfMap = new HashMap<>();
failCaseMapOfMap.put("aaa", "str");
failCaseMapOfMap.put("bbb", 6);
failCaseMapOfMap.put("ccc", 7);
failCases.add(failCaseMapOfMap);
failCaseMapOfMap = new HashMap<>();
failCaseMapOfMap.put("aaa", -1);
failCaseMapOfMap.put("bbb", 6);
failCaseMapOfMap.put("ccc", 7);
failCases.add(failCaseMapOfMap);
failCaseMapOfMap = new HashMap<>();
failCaseMapOfMap.put("aaa", 1);
failCaseMapOfMap.put("bbb", 6);
failCaseMapOfMap.put("ccc", 7.4);
failCases.add(failCaseMapOfMap);
failCaseMapOfMap = new HashMap<>();
failCaseMapOfMap.put("comp1", "comp2");
failCaseMapOfMap.put("comp2", "comp4");
failCases.add(failCaseMapOfMap);
failCases.add(null);
for (Object value : failCases) {
config.put(TestConfig.TEST_MAP_CONFIG_9, value);
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class)));
}
}
@Test
public void testListEntryTypeAnnotation() {
TestConfig config = new TestConfig();
Collection<Object> passCases = new LinkedList<>();
Collection<Object> failCases = new LinkedList<>();
Object[] passCase1 = { 1, 5.0, -0.01, 0, Integer.MAX_VALUE, Double.MIN_VALUE };
Object[] passCase2 = { 1 };
passCases.add(Arrays.asList(passCase1));
passCases.add(Arrays.asList(passCase2));
for (Object value : passCases) {
config.put(TestConfig.TEST_MAP_CONFIG_3, value);
ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class));
}
Object[] failCase1 = { 1, 5.0, -0.01, 0, "aaa" };
Object[] failCase2 = { "aaa" };
failCases.add(failCase1);
failCases.add(failCase2);
failCases.add(1);
failCases.add("b");
failCases.add(null);
for (Object value : failCases) {
config.put(TestConfig.TEST_MAP_CONFIG_3, value);
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class)));
}
}
@Test
public void testListEntryCustomAnnotation() {
TestConfig config = new TestConfig();
Collection<Object> passCases = new LinkedList<>();
Collection<Object> failCases = new LinkedList<>();
Object[] passCase1 = { 1, 5.0, 0.01, Double.MAX_VALUE };
Object[] passCase2 = { 1 };
passCases.add(Arrays.asList(passCase1));
passCases.add(Arrays.asList(passCase2));
for (Object value : passCases) {
config.put(TestConfig.TEST_MAP_CONFIG_4, value);
ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class));
}
Object[] failCase1 = { 1, 5.0, -0.01, 3.0 };
Object[] failCase2 = { 1, 5.0, -0.01, 1 };
Object[] failCase3 = { "aaa", "bbb", "aaa" };
Object[] failCase4 = { 1, 5.0, null, 1 };
Object[] failCase5 = { 1, 5.0, 0, 1 };
failCases.add(Arrays.asList(failCase1));
failCases.add(Arrays.asList(failCase2));
failCases.add(Arrays.asList(failCase3));
failCases.add(Arrays.asList(failCase4));
failCases.add(Arrays.asList(failCase5));
failCases.add(1);
failCases.add("b");
for (Object value : failCases) {
config.put(TestConfig.TEST_MAP_CONFIG_4, value);
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class)));
}
}
@Test
public void TestAcceptedStrings() {
TestConfig config = new TestConfig();
String[] passCases = { "aaa", "bbb", "ccc" };
for (Object value : passCases) {
config.put(TestConfig.TEST_MAP_CONFIG_5, value);
ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class));
}
String[] failCases = { "aa", "bb", "cc", "abc", "a", "b", "c", "" };
for (Object value : failCases) {
config.put(TestConfig.TEST_MAP_CONFIG_5, value);
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class)));
}
}
@Test
public void TestImpersonationAclUserEntryValidator() {
TestConfig config = new TestConfig();
Collection<Object> passCases = new LinkedList<>();
Collection<Object> failCases = new LinkedList<>();
Map<String, Map<String, List<String>>> passCase1 = new HashMap<>();
Map<String, List<String>> passCase1_hostsAndGroups = new HashMap<>();
String[] hosts = { "host.1", "host.2", "host.3" };
passCase1_hostsAndGroups.put("hosts", Arrays.asList(hosts));
String[] groups = { "group.1", "group.2", "group.3" };
passCase1_hostsAndGroups.put("groups", Arrays.asList(groups));
passCase1.put("jerry", passCase1_hostsAndGroups);
passCases.add(passCase1);
for (Object value : passCases) {
config.put(TestConfig.TEST_MAP_CONFIG_6, value);
ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class));
}
Map<String, Map<String, List<String>>> failCase1 = new HashMap<>();
Map<String, List<String>> failCase1_hostsAndGroups = new HashMap<>();
failCase1_hostsAndGroups.put("hosts", Arrays.asList(hosts));
failCase1.put("jerry", failCase1_hostsAndGroups);
Map<String, Map<String, List<String>>> failCase2 = new HashMap<>();
Map<String, List<String>> failCase2_hostsAndGroups = new HashMap<>();
String[] failgroups = { "group.1", "group.2", "group.3" };
failCase2_hostsAndGroups.put("groups", Arrays.asList(groups));
failCase2.put("jerry", failCase2_hostsAndGroups);
failCases.add(failCase1);
failCases.add(failCase2);
failCases.add("stuff");
failCases.add(5);
for (Object value : failCases) {
config.put(TestConfig.TEST_MAP_CONFIG_6, value);
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class)));
}
}
@Test
public void TestResourceAwareSchedulerUserPool() {
TestConfig config = new TestConfig();
Collection<Object> failCases = new LinkedList<>();
Map<String, Map<String, Integer>> passCase1 = new HashMap<>();
passCase1.put("jerry", new HashMap<>());
passCase1.put("bobby", new HashMap<>());
passCase1.put("derek", new HashMap<>());
passCase1.get("jerry").put("cpu", 10000);
passCase1.get("jerry").put("memory", 20148);
passCase1.get("bobby").put("cpu", 20000);
passCase1.get("bobby").put("memory", 40148);
passCase1.get("derek").put("cpu", 30000);
passCase1.get("derek").put("memory", 60148);
config.put(TestConfig.TEST_MAP_CONFIG_7, passCase1);
ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class));
Map<String, Map<String, Integer>> failCase1 = new HashMap<>();
failCase1.put("jerry", new HashMap<>());
failCase1.put("bobby", new HashMap<>());
failCase1.put("derek", new HashMap<>());
failCase1.get("jerry").put("cpu", 10000);
failCase1.get("jerry").put("memory", 20148);
failCase1.get("bobby").put("cpu", 20000);
failCase1.get("bobby").put("memory", 40148);
//this will fail the test since user derek does not have an entry for memory
failCase1.get("derek").put("cpu", 30000);
Map<String, Map<String, Integer>> failCase2 = new HashMap<>();
//this will fail since jerry doesn't have either cpu or memory entries
failCase2.put("jerry", new HashMap<>());
failCase2.put("bobby", new HashMap<>());
failCase2.put("derek", new HashMap<>());
failCase2.get("bobby").put("cpu", 20000);
failCase2.get("bobby").put("memory", 40148);
failCase2.get("derek").put("cpu", 30000);
failCase2.get("derek").put("memory", 60148);
failCases.add(failCase1);
failCases.add(failCase2);
for (Object value : failCases) {
config.put(TestConfig.TEST_MAP_CONFIG_7, value);
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class)));
}
}
@Test
public void TestImplementsClassValidator() {
TestConfig config = new TestConfig();
Collection<Object> passCases = new LinkedList<>();
Collection<Object> failCases = new LinkedList<>();
passCases.add("org.apache.storm.networktopography.DefaultRackDNSToSwitchMapping");
for (Object value : passCases) {
config.put(TestConfig.TEST_MAP_CONFIG_8, value);
ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class));
}
//will fail since org.apache.storm.nimbus.NimbusInfo doesn't implement or extend org.apache.storm.networktopography
// .DNSToSwitchMapping
failCases.add("org.apache.storm.nimbus.NimbusInfo");
failCases.add(null);
for (Object value : failCases) {
config.put(TestConfig.TEST_MAP_CONFIG_8, value);
assertThrows(IllegalArgumentException.class, () -> ConfigValidation.validateFields(config, Collections.singletonList(TestConfig.class)));
}
}
    // Config stub whose annotated constants drive the validators under test;
    // each annotation declares the validation contract enforced on the matching key.
    public static class TestConfig extends HashMap<String, Object> {
        // String keys mapped to Integer values.
        @IsMapEntryType(keyType = String.class, valueType = Integer.class)
        public static final String TEST_MAP_CONFIG = "test.map.config";
        // String keys mapped to positive Integer values.
        @IsMapEntryCustom(
            keyValidatorClasses = { StringValidator.class },
            valueValidatorClasses = { PositiveNumberValidator.class, IntegerValidator.class })
        public static final String TEST_MAP_CONFIG_2 = "test.map.config.2";
        // Non-null list whose entries are all Numbers.
        @IsListEntryType(type = Number.class)
        @NotNull
        public static final String TEST_MAP_CONFIG_3 = "test.map.config.3";
        // List of non-null positive numbers with no duplicate entries.
        @IsListEntryCustom(
            entryValidatorClasses = { PositiveNumberValidator.class, NotNullValidator.class })
        @IsNoDuplicateInList
        public static final String TEST_MAP_CONFIG_4 = "test.map.config.4";
        // Only the listed string literals are accepted.
        @IsString(acceptedValues = { "aaa", "bbb", "ccc" })
        public static final String TEST_MAP_CONFIG_5 = "test.map.config.5";
        // Impersonation ACL: user name -> { "hosts": [...], "groups": [...] }.
        @IsMapEntryCustom(keyValidatorClasses = { StringValidator.class }, valueValidatorClasses = { ImpersonationAclUserEntryValidator
            .class })
        public static final String TEST_MAP_CONFIG_6 = "test.map.config.6";
        // RAS user pool: user name -> map of resource guarantees.
        @IsMapEntryCustom(keyValidatorClasses = { StringValidator.class }, valueValidatorClasses = { UserResourcePoolEntryValidator.class })
        public static final String TEST_MAP_CONFIG_7 = "test.map.config.7";
        // Non-null FQCN of a class implementing DNSToSwitchMapping.
        @IsImplementationOfClass(implementsClass = org.apache.storm.networktopography.DNSToSwitchMapping.class)
        @NotNull
        public static final String TEST_MAP_CONFIG_8 = "test.map.config.8";
        // Non-null value accepted by exactly one of the two listed validators.
        @IsExactlyOneOf(valueValidatorClasses = {ListOfListOfStringValidator.class, RasConstraintsTypeValidator.class})
        @NotNull
        public static final String TEST_MAP_CONFIG_9 = "test.map.config.9";
    }
}
|
apache/syncope | 38,046 | core/persistence-neo4j/src/test/java/org/apache/syncope/core/persistence/neo4j/inner/AnySearchTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.core.persistence.neo4j.inner;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.syncope.common.lib.SyncopeConstants;
import org.apache.syncope.common.lib.types.AnyTypeKind;
import org.apache.syncope.common.lib.types.IdRepoEntitlement;
import org.apache.syncope.core.persistence.api.dao.AnyObjectDAO;
import org.apache.syncope.core.persistence.api.dao.AnySearchDAO;
import org.apache.syncope.core.persistence.api.dao.AnyTypeDAO;
import org.apache.syncope.core.persistence.api.dao.GroupDAO;
import org.apache.syncope.core.persistence.api.dao.RealmDAO;
import org.apache.syncope.core.persistence.api.dao.RealmSearchDAO;
import org.apache.syncope.core.persistence.api.dao.RoleDAO;
import org.apache.syncope.core.persistence.api.dao.UserDAO;
import org.apache.syncope.core.persistence.api.dao.search.AnyCond;
import org.apache.syncope.core.persistence.api.dao.search.AnyTypeCond;
import org.apache.syncope.core.persistence.api.dao.search.AttrCond;
import org.apache.syncope.core.persistence.api.dao.search.AuxClassCond;
import org.apache.syncope.core.persistence.api.dao.search.MemberCond;
import org.apache.syncope.core.persistence.api.dao.search.MembershipCond;
import org.apache.syncope.core.persistence.api.dao.search.RelationshipTypeCond;
import org.apache.syncope.core.persistence.api.dao.search.ResourceCond;
import org.apache.syncope.core.persistence.api.dao.search.RoleCond;
import org.apache.syncope.core.persistence.api.dao.search.SearchCond;
import org.apache.syncope.core.persistence.api.entity.AnyType;
import org.apache.syncope.core.persistence.api.entity.PlainAttr;
import org.apache.syncope.core.persistence.api.entity.anyobject.AMembership;
import org.apache.syncope.core.persistence.api.entity.anyobject.AnyObject;
import org.apache.syncope.core.persistence.api.entity.group.Group;
import org.apache.syncope.core.persistence.api.entity.user.User;
import org.apache.syncope.core.persistence.api.utils.FormatUtils;
import org.apache.syncope.core.persistence.api.utils.RealmUtils;
import org.apache.syncope.core.persistence.neo4j.AbstractTest;
import org.apache.syncope.core.spring.security.AuthContextUtils;
import org.apache.syncope.core.spring.security.SyncopeAuthenticationDetails;
import org.apache.syncope.core.spring.security.SyncopeGrantedAuthority;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class AnySearchTest extends AbstractTest {
    // Date (yyyy-MM-dd) written to rossini's "loginDate" attribute before each test,
    // and asserted against by the date-based search tests below.
    private static final String LOGIN_DATE_VALUE = "2009-05-26";
    @Autowired
    private UserDAO userDAO;
    @Autowired
    private AnyObjectDAO anyObjectDAO;
    @Autowired
    private GroupDAO groupDAO;
    // DAO under test.
    @Autowired
    private AnySearchDAO searchDAO;
    @Autowired
    private AnyTypeDAO anyTypeDAO;
    @Autowired
    private RealmDAO realmDAO;
    @Autowired
    private RealmSearchDAO realmSearchDAO;
    @Autowired
    private RoleDAO roleDAO;
@BeforeEach
public void adjustLoginDateForLocalSystem() throws ParseException {
User rossini = userDAO.findByUsername("rossini").orElseThrow();
PlainAttr loginDate = rossini.getPlainAttr("loginDate").get();
loginDate.getValues().getFirst().setDateValue(FormatUtils.parseDate(LOGIN_DATE_VALUE, "yyyy-MM-dd"));
userDAO.save(rossini);
}
@Test
public void orOfThree() {
AnyCond cond1 = new AnyCond(AttrCond.Type.EQ);
cond1.setSchema("username");
cond1.setExpression("rossini");
AnyCond cond2 = new AnyCond(AttrCond.Type.EQ);
cond2.setSchema("username");
cond2.setExpression("puccini");
AnyCond cond3 = new AnyCond(AttrCond.Type.EQ);
cond3.setSchema("username");
cond3.setExpression("notfound");
SearchCond cond = SearchCond.or(List.of(
SearchCond.of(cond1), SearchCond.of(cond2), SearchCond.of(cond3)));
assertTrue(cond.isValid());
List<User> users = searchDAO.search(cond, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(2, users.size());
assertTrue(users.stream().anyMatch(u -> "rossini".equals(u.getUsername())));
assertTrue(users.stream().anyMatch(u -> "puccini".equals(u.getUsername())));
}
    @Test
    public void searchTwoPlainSchemas() {
        // 1. AND of two plain attribute conditions both matching the same user.
        AttrCond firstnameCond = new AttrCond(AttrCond.Type.EQ);
        firstnameCond.setSchema("firstname");
        firstnameCond.setExpression("Gioacchino");
        AttrCond surnameCond = new AttrCond(AttrCond.Type.EQ);
        surnameCond.setSchema("surname");
        surnameCond.setExpression("Rossini");
        SearchCond cond = SearchCond.and(SearchCond.of(firstnameCond), SearchCond.of(surnameCond));
        assertTrue(cond.isValid());
        List<User> users = searchDAO.search(cond, AnyTypeKind.USER);
        assertNotNull(users);
        assertEquals(1, users.size());
        // 2. same firstname, negated non-matching surname: still exactly one match.
        surnameCond = new AttrCond(AttrCond.Type.EQ);
        surnameCond.setSchema("surname");
        surnameCond.setExpression("Verdi");
        cond = SearchCond.and(SearchCond.of(firstnameCond), SearchCond.negate(surnameCond));
        assertTrue(cond.isValid());
        users = searchDAO.search(cond, AnyTypeKind.USER);
        assertNotNull(users);
        assertEquals(1, users.size());
        // 3. AND of fullname and userId, both matching bellini.
        AttrCond fullnameCond = new AttrCond(AttrCond.Type.EQ);
        fullnameCond.setSchema("fullname");
        fullnameCond.setExpression("Vincenzo Bellini");
        AttrCond userIdCond = new AttrCond(AttrCond.Type.EQ);
        userIdCond.setSchema("userId");
        userIdCond.setExpression("bellini@apache.org");
        cond = SearchCond.and(SearchCond.of(fullnameCond), SearchCond.of(userIdCond));
        assertTrue(cond.isValid());
        users = searchDAO.search(cond, AnyTypeKind.USER);
        assertNotNull(users);
        assertEquals(1, users.size());
        // 4. fullname match AND negated userId of a different user: still one match.
        userIdCond = new AttrCond(AttrCond.Type.EQ);
        userIdCond.setSchema("userId");
        userIdCond.setExpression("rossini@apache.org");
        cond = SearchCond.and(SearchCond.of(fullnameCond), SearchCond.negate(userIdCond));
        assertTrue(cond.isValid());
        users = searchDAO.search(cond, AnyTypeKind.USER);
        assertNotNull(users);
        assertEquals(1, users.size());
    }
@Test
public void searchWithLikeCondition() {
AttrCond fullnameLeafCond = new AttrCond(AttrCond.Type.LIKE);
fullnameLeafCond.setSchema("fullname");
fullnameLeafCond.setExpression("%o%");
MembershipCond groupCond = new MembershipCond();
groupCond.setGroup("root");
AttrCond loginDateCond = new AttrCond(AttrCond.Type.EQ);
loginDateCond.setSchema("loginDate");
loginDateCond.setExpression(LOGIN_DATE_VALUE);
SearchCond subCond = SearchCond.and(
SearchCond.of(fullnameLeafCond), SearchCond.of(groupCond));
assertTrue(subCond.isValid());
SearchCond cond = SearchCond.and(subCond, SearchCond.of(loginDateCond));
assertTrue(cond.isValid());
List<User> users = searchDAO.search(cond, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(1, users.size());
}
@Test
public void searchCaseInsensitiveWithLikeCondition() {
AttrCond fullnameLeafCond = new AttrCond(AttrCond.Type.ILIKE);
fullnameLeafCond.setSchema("fullname");
fullnameLeafCond.setExpression("%O%");
MembershipCond groupCond = new MembershipCond();
groupCond.setGroup("root");
AttrCond loginDateCond = new AttrCond(AttrCond.Type.EQ);
loginDateCond.setSchema("loginDate");
loginDateCond.setExpression(LOGIN_DATE_VALUE);
SearchCond subCond = SearchCond.and(
SearchCond.of(fullnameLeafCond), SearchCond.of(groupCond));
assertTrue(subCond.isValid());
SearchCond cond = SearchCond.and(subCond, SearchCond.of(loginDateCond));
assertTrue(cond.isValid());
List<User> users = searchDAO.search(cond, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(1, users.size());
}
@Test
public void searchWithNotAttrCond() {
AttrCond fullnameLeafCond = new AttrCond(AttrCond.Type.EQ);
fullnameLeafCond.setSchema("fullname");
fullnameLeafCond.setExpression("Giuseppe Verdi");
SearchCond cond = SearchCond.negate(fullnameLeafCond);
assertTrue(cond.isValid());
List<User> users = searchDAO.search(cond, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(4, users.size());
Set<String> ids = users.stream().map(User::getKey).collect(Collectors.toSet());
assertTrue(ids.contains("1417acbe-cbf6-4277-9372-e75e04f97000"));
assertTrue(ids.contains("b3cbc78d-32e6-4bd4-92e0-bbe07566a2ee"));
}
@Test
public void searchWithNotAnyCond() {
AnyCond usernameLeafCond = new AnyCond(AttrCond.Type.EQ);
usernameLeafCond.setSchema("username");
usernameLeafCond.setExpression("verdi");
SearchCond cond = SearchCond.negate(usernameLeafCond);
assertTrue(cond.isValid());
List<User> users = searchDAO.search(cond, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(4, users.size());
assertTrue(users.stream().noneMatch(user -> "verdi".equals(user.getUsername())));
}
@Test
public void searchCaseInsensitiveWithNotCondition() {
AttrCond fullnameLeafCond = new AttrCond(AttrCond.Type.IEQ);
fullnameLeafCond.setSchema("fullname");
fullnameLeafCond.setExpression("giuseppe verdi");
SearchCond cond = SearchCond.negate(fullnameLeafCond);
assertTrue(cond.isValid());
List<User> users = searchDAO.search(cond, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(4, users.size());
Set<String> ids = users.stream().map(User::getKey).collect(Collectors.toSet());
assertTrue(ids.contains("1417acbe-cbf6-4277-9372-e75e04f97000"));
assertTrue(ids.contains("b3cbc78d-32e6-4bd4-92e0-bbe07566a2ee"));
}
@Test
public void searchByBoolean() {
AttrCond coolLeafCond = new AttrCond(AttrCond.Type.EQ);
coolLeafCond.setSchema("cool");
coolLeafCond.setExpression("true");
SearchCond cond = SearchCond.of(coolLeafCond);
assertTrue(cond.isValid());
List<User> users = searchDAO.search(cond, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(1, users.size());
assertEquals("c9b2dec2-00a7-4855-97c0-d854842b4b24", users.getFirst().getKey());
}
@Test
public void searchByRealm() {
AnyCond anyCond = new AnyCond(AttrCond.Type.EQ);
anyCond.setSchema("realm");
anyCond.setExpression("c5b75db1-fce7-470f-b780-3b9934d82a9d");
List<User> users = searchDAO.search(SearchCond.of(anyCond), AnyTypeKind.USER);
assertNotNull(users);
assertEquals(1, users.size());
assertEquals("rossini", users.getFirst().getUsername());
anyCond.setExpression("/even");
users = searchDAO.search(SearchCond.of(anyCond), AnyTypeKind.USER);
assertNotNull(users);
assertEquals(1, users.size());
assertEquals("rossini", users.getFirst().getUsername());
}
    @Test
    public void searchByPageAndSize() {
        AttrCond fullnameLeafCond = new AttrCond(AttrCond.Type.LIKE);
        fullnameLeafCond.setSchema("fullname");
        fullnameLeafCond.setExpression("%o%");
        MembershipCond groupCond = new MembershipCond();
        groupCond.setGroup("root");
        AttrCond loginDateCond = new AttrCond(AttrCond.Type.EQ);
        loginDateCond.setSchema("loginDate");
        loginDateCond.setExpression(LOGIN_DATE_VALUE);
        SearchCond subCond = SearchCond.and(
                SearchCond.of(fullnameLeafCond), SearchCond.of(groupCond));
        assertTrue(subCond.isValid());
        SearchCond cond = SearchCond.and(subCond, SearchCond.of(loginDateCond));
        assertTrue(cond.isValid());
        // count must agree with the single expected match
        long count = searchDAO.count(
                realmDAO.getRoot(), true, SyncopeConstants.FULL_ADMIN_REALMS, cond, AnyTypeKind.USER);
        assertEquals(1, count);
        // first page (size 2) holds the only match
        List<User> users = searchDAO.search(
                realmDAO.getRoot(), true, SyncopeConstants.FULL_ADMIN_REALMS, cond,
                PageRequest.of(0, 2), AnyTypeKind.USER);
        assertNotNull(users);
        assertEquals(1, users.size());
        // a page beyond the result set comes back empty
        users = searchDAO.search(
                realmDAO.getRoot(), true, SyncopeConstants.FULL_ADMIN_REALMS, cond,
                PageRequest.of(2, 2), AnyTypeKind.USER);
        assertNotNull(users);
        assertTrue(users.isEmpty());
    }
    @Test
    public void searchByGroup() {
        MembershipCond groupCond = new MembershipCond();
        groupCond.setGroup("child");
        List<User> matchingChild = searchDAO.search(SearchCond.of(groupCond), AnyTypeKind.USER);
        assertNotNull(matchingChild);
        assertTrue(matchingChild.stream().anyMatch(user -> "verdi".equals(user.getUsername())));
        groupCond.setGroup("otherchild");
        List<User> matchingOtherChild = searchDAO.search(SearchCond.of(groupCond), AnyTypeKind.USER);
        assertNotNull(matchingOtherChild);
        assertTrue(matchingOtherChild.stream().anyMatch(user -> "rossini".equals(user.getUsername())));
        Set<String> union = Stream.concat(
                matchingChild.stream().map(User::getUsername),
                matchingOtherChild.stream().map(User::getUsername)).
                collect(Collectors.toSet());
        // the '%' wildcard in the group name matches both "child" and "otherchild",
        // so the result must equal the union of the two previous searches
        groupCond.setGroup("%child");
        List<User> matchingStar = searchDAO.search(SearchCond.of(groupCond), AnyTypeKind.USER);
        assertNotNull(matchingStar);
        assertTrue(matchingStar.stream().anyMatch(user -> "verdi".equals(user.getUsername())));
        assertTrue(matchingStar.stream().anyMatch(user -> "rossini".equals(user.getUsername())));
        assertEquals(union, matchingStar.stream().map(User::getUsername).collect(Collectors.toSet()));
        // with the second argument false — presumably non-recursive realm search,
        // TODO confirm against AnySearchDAO — rossini is excluded while verdi remains
        matchingStar = searchDAO.search(realmDAO.getRoot(), false, SyncopeConstants.FULL_ADMIN_REALMS,
                SearchCond.of(groupCond), Pageable.unpaged(), AnyTypeKind.USER);
        assertNotNull(matchingStar);
        assertTrue(matchingStar.stream().anyMatch(user -> "verdi".equals(user.getUsername())));
        assertTrue(matchingStar.stream().noneMatch(user -> "rossini".equals(user.getUsername())));
    }
@Test
public void searchByRole() {
RoleCond roleCond = new RoleCond();
roleCond.setRole("Other");
List<User> users = searchDAO.search(SearchCond.of(roleCond), AnyTypeKind.USER);
assertNotNull(users);
assertEquals(1, users.size());
}
@Test
public void searchByIsNull() {
AttrCond coolLeafCond = new AttrCond(AttrCond.Type.ISNULL);
coolLeafCond.setSchema("cool");
List<User> users = searchDAO.search(SearchCond.of(coolLeafCond), AnyTypeKind.USER);
assertNotNull(users);
assertEquals(4, users.size());
coolLeafCond = new AttrCond(AttrCond.Type.ISNOTNULL);
coolLeafCond.setSchema("cool");
users = searchDAO.search(SearchCond.of(coolLeafCond), AnyTypeKind.USER);
assertNotNull(users);
assertEquals(1, users.size());
}
@Test
public void searchByAuxClass() {
AuxClassCond ac = new AuxClassCond();
ac.setAuxClass("csv");
List<Group> groups = searchDAO.search(SearchCond.of(ac), AnyTypeKind.GROUP);
assertNotNull(groups);
assertEquals(2, groups.size());
}
@Test
public void searchByResource() {
ResourceCond ws2 = new ResourceCond();
ws2.setResource("ws-target-resource-2");
ResourceCond ws1 = new ResourceCond();
ws1.setResource("ws-target-resource-list-mappings-2");
SearchCond searchCondition = SearchCond.and(SearchCond.negate(ws2), SearchCond.of(ws1));
assertTrue(searchCondition.isValid());
List<User> users = searchDAO.search(searchCondition, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(1, users.size());
}
@Test
public void searchByBooleanAttrCond() {
AttrCond booleanCond = new AttrCond(AnyCond.Type.EQ);
booleanCond.setSchema("show");
booleanCond.setExpression("true");
List<Group> matchingGroups = searchDAO.search(SearchCond.of(booleanCond), AnyTypeKind.GROUP);
assertNotNull(matchingGroups);
assertFalse(matchingGroups.isEmpty());
}
@Test
public void searchByUsernameAndKey() {
AnyCond usernameLeafCond = new AnyCond(AnyCond.Type.LIKE);
usernameLeafCond.setSchema("username");
usernameLeafCond.setExpression("%ini");
AnyCond idRightCond = new AnyCond(AnyCond.Type.LT);
idRightCond.setSchema("key");
idRightCond.setExpression("2");
SearchCond searchCondition = SearchCond.and(
SearchCond.of(usernameLeafCond),
SearchCond.of(idRightCond));
List<User> matching = searchDAO.search(searchCondition, AnyTypeKind.USER);
assertNotNull(matching);
assertEquals(1, matching.size());
assertEquals("rossini", matching.getFirst().getUsername());
assertEquals("1417acbe-cbf6-4277-9372-e75e04f97000", matching.getFirst().getKey());
}
@Test
public void searchByGroupNameAndKey() {
AnyCond groupNameLeafCond = new AnyCond(AnyCond.Type.EQ);
groupNameLeafCond.setSchema("name");
groupNameLeafCond.setExpression("root");
AnyCond idRightCond = new AnyCond(AnyCond.Type.EQ);
idRightCond.setSchema("key");
idRightCond.setExpression("37d15e4c-cdc1-460b-a591-8505c8133806");
SearchCond searchCondition = SearchCond.and(
SearchCond.of(groupNameLeafCond),
SearchCond.of(idRightCond));
assertTrue(searchCondition.isValid());
List<Group> matching = searchDAO.search(searchCondition, AnyTypeKind.GROUP);
assertNotNull(matching);
assertEquals(1, matching.size());
assertEquals("root", matching.getFirst().getName());
assertEquals("37d15e4c-cdc1-460b-a591-8505c8133806", matching.getFirst().getKey());
}
@Test
public void searchByUsernameAndFullname() {
AnyCond usernameLeafCond = new AnyCond(AnyCond.Type.EQ);
usernameLeafCond.setSchema("username");
usernameLeafCond.setExpression("rossini");
AttrCond idRightCond = new AttrCond(AttrCond.Type.LIKE);
idRightCond.setSchema("fullname");
idRightCond.setExpression("Giuseppe V%");
SearchCond searchCondition = SearchCond.or(
SearchCond.of(usernameLeafCond),
SearchCond.of(idRightCond));
List<User> matchingUsers = searchDAO.search(searchCondition, AnyTypeKind.USER);
assertNotNull(matchingUsers);
assertEquals(2, matchingUsers.size());
}
@Test
public void searchByUsernameAndFullnameIgnoreCase() {
AnyCond usernameLeafCond = new AnyCond(AnyCond.Type.IEQ);
usernameLeafCond.setSchema("username");
usernameLeafCond.setExpression("RoSsini");
AttrCond idRightCond = new AttrCond(AttrCond.Type.ILIKE);
idRightCond.setSchema("fullname");
idRightCond.setExpression("gIuseppe v%");
SearchCond searchCondition = SearchCond.or(
SearchCond.of(usernameLeafCond),
SearchCond.of(idRightCond));
List<User> matchingUsers = searchDAO.search(searchCondition, AnyTypeKind.USER);
assertNotNull(matchingUsers);
assertEquals(2, matchingUsers.size());
}
@Test
public void searchByKey() {
AnyCond idLeafCond = new AnyCond(AnyCond.Type.EQ);
idLeafCond.setSchema("key");
idLeafCond.setExpression("74cd8ece-715a-44a4-a736-e17b46c4e7e6");
SearchCond searchCondition = SearchCond.of(idLeafCond);
assertTrue(searchCondition.isValid());
List<User> users = searchDAO.search(searchCondition, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(1, users.size());
assertEquals("74cd8ece-715a-44a4-a736-e17b46c4e7e6", users.getFirst().getKey());
}
@Test
public void searchByType() {
AnyTypeCond tcond = new AnyTypeCond();
tcond.setAnyTypeKey("PRINTER");
SearchCond searchCondition = SearchCond.of(tcond);
assertTrue(searchCondition.isValid());
List<AnyObject> printers = searchDAO.search(searchCondition, AnyTypeKind.ANY_OBJECT);
assertNotNull(printers);
assertEquals(3, printers.size());
tcond.setAnyTypeKey("UNEXISTING");
printers = searchDAO.search(searchCondition, AnyTypeKind.ANY_OBJECT);
assertNotNull(printers);
assertTrue(printers.isEmpty());
}
@Test
public void searchByRelationshipType() {
// 1. first search for printers involved in "inclusion" relationship
RelationshipTypeCond relationshipTypeCond = new RelationshipTypeCond();
relationshipTypeCond.setRelationshipType("inclusion");
AnyTypeCond tcond = new AnyTypeCond();
tcond.setAnyTypeKey("PRINTER");
SearchCond cond = SearchCond.and(SearchCond.of(relationshipTypeCond), SearchCond.of(tcond));
assertTrue(cond.isValid());
List<AnyObject> anyObjects = searchDAO.search(cond, AnyTypeKind.ANY_OBJECT);
assertNotNull(anyObjects);
assertEquals(2, anyObjects.size());
assertTrue(anyObjects.stream().anyMatch(any -> "fc6dbc3a-6c07-4965-8781-921e7401a4a5".equals(any.getKey())));
assertTrue(anyObjects.stream().anyMatch(any -> "8559d14d-58c2-46eb-a2d4-a7d35161e8f8".equals(any.getKey())));
// 2. search for users involved in "neighborhood" relationship
relationshipTypeCond.setRelationshipType("neighborhood");
cond = SearchCond.of(relationshipTypeCond);
List<User> users = searchDAO.search(cond, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(1, users.size());
assertEquals("c9b2dec2-00a7-4855-97c0-d854842b4b24", users.getFirst().getKey());
}
@Test
public void searchByAnyCondDate() {
AnyCond creationDateCond = new AnyCond(AnyCond.Type.EQ);
creationDateCond.setSchema("creationDate");
creationDateCond.setExpression("2021-04-15 12:45:00");
SearchCond searchCondition = SearchCond.of(creationDateCond);
assertTrue(searchCondition.isValid());
List<AnyObject> anyObjects = searchDAO.search(searchCondition, AnyTypeKind.ANY_OBJECT);
assertNotNull(anyObjects);
assertEquals(1, anyObjects.size());
assertEquals("9e1d130c-d6a3-48b1-98b3-182477ed0688", anyObjects.getFirst().getKey());
}
@Test
public void searchByAttrCondDate() {
AttrCond loginDateCond = new AttrCond(AnyCond.Type.LT);
loginDateCond.setSchema("loginDate");
loginDateCond.setExpression("2009-05-27");
SearchCond searchCondition = SearchCond.of(loginDateCond);
assertTrue(searchCondition.isValid());
List<User> users = searchDAO.search(searchCondition, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(1, users.size());
assertEquals("1417acbe-cbf6-4277-9372-e75e04f97000", users.getFirst().getKey());
}
@Test
public void userOrderBy() {
AnyCond usernameLeafCond = new AnyCond(AnyCond.Type.EQ);
usernameLeafCond.setSchema("username");
usernameLeafCond.setExpression("rossini");
AttrCond idRightCond = new AttrCond(AttrCond.Type.LIKE);
idRightCond.setSchema("fullname");
idRightCond.setExpression("Giuseppe V%");
SearchCond searchCondition = SearchCond.or(
SearchCond.of(usernameLeafCond), SearchCond.of(idRightCond));
List<Sort.Order> orderByClauses = new ArrayList<>();
orderByClauses.add(new Sort.Order(Sort.Direction.DESC, "username"));
orderByClauses.add(new Sort.Order(Sort.Direction.ASC, "fullname"));
orderByClauses.add(new Sort.Order(Sort.Direction.ASC, "status"));
orderByClauses.add(new Sort.Order(Sort.Direction.DESC, "firstname"));
List<User> users = searchDAO.search(searchCondition, orderByClauses, AnyTypeKind.USER);
assertEquals(
searchDAO.count(
realmDAO.getRoot(), true,
SyncopeConstants.FULL_ADMIN_REALMS, searchCondition, AnyTypeKind.USER),
users.size());
}
@Test
public void groupOrderBy() {
AnyCond idLeafCond = new AnyCond(AnyCond.Type.LIKE);
idLeafCond.setSchema("name");
idLeafCond.setExpression("%r");
SearchCond searchCondition = SearchCond.of(idLeafCond);
assertTrue(searchCondition.isValid());
Sort.Order orderByClause = new Sort.Order(Sort.DEFAULT_DIRECTION, "name");
List<Group> groups = searchDAO.search(
searchCondition, List.of(orderByClause), AnyTypeKind.GROUP);
assertEquals(
searchDAO.count(
realmDAO.getRoot(), true,
SyncopeConstants.FULL_ADMIN_REALMS, searchCondition, AnyTypeKind.GROUP),
groups.size());
}
@Test
public void member() {
MemberCond memberCond = new MemberCond();
memberCond.setMember("1417acbe-cbf6-4277-9372-e75e04f97000");
SearchCond searchCondition = SearchCond.of(memberCond);
assertTrue(searchCondition.isValid());
List<Group> groups = searchDAO.search(searchCondition, AnyTypeKind.GROUP);
assertEquals(2, groups.size());
assertTrue(groups.contains(groupDAO.findByName("root").orElseThrow()));
assertTrue(groups.contains(groupDAO.findByName("otherchild").orElseThrow()));
}
@Test
public void asGroupOwner() {
// prepare authentication
Map<String, Set<String>> entForRealms = new HashMap<>();
roleDAO.findById(RoleDAO.GROUP_OWNER_ROLE).orElseThrow().getEntitlements().forEach(entitlement -> {
Set<String> realms = Optional.ofNullable(entForRealms.get(entitlement)).orElseGet(() -> {
Set<String> r = new HashSet<>();
entForRealms.put(entitlement, r);
return r;
});
realms.add(RealmUtils.getGroupOwnerRealm(
SyncopeConstants.ROOT_REALM, "37d15e4c-cdc1-460b-a591-8505c8133806"));
});
Set<SyncopeGrantedAuthority> authorities = new HashSet<>();
entForRealms.forEach((key, value) -> {
SyncopeGrantedAuthority authority = new SyncopeGrantedAuthority(key);
authority.addRealms(value);
authorities.add(authority);
});
UsernamePasswordAuthenticationToken auth = new UsernamePasswordAuthenticationToken(
new org.springframework.security.core.userdetails.User(
"poorGroupOwner", "FAKE_PASSWORD", authorities), "FAKE_PASSWORD", authorities);
auth.setDetails(new SyncopeAuthenticationDetails(SyncopeConstants.MASTER_DOMAIN, null));
SecurityContextHolder.getContext().setAuthentication(auth);
try {
// test count() and search()
Set<String> authRealms = RealmUtils.getEffective(
AuthContextUtils.getAuthorizations().get(IdRepoEntitlement.GROUP_SEARCH),
SyncopeConstants.ROOT_REALM);
assertEquals(
1,
searchDAO.count(
realmDAO.getRoot(), true, authRealms, groupDAO.getAllMatchingCond(), AnyTypeKind.GROUP));
List<Group> groups = searchDAO.search(
realmDAO.getRoot(),
true,
authRealms,
groupDAO.getAllMatchingCond(),
PageRequest.of(0, 10),
AnyTypeKind.GROUP);
assertEquals(1, groups.size());
assertEquals("37d15e4c-cdc1-460b-a591-8505c8133806", groups.getFirst().getKey());
} finally {
SecurityContextHolder.getContext().setAuthentication(null);
}
}
@Test
public void changePwdDate() {
AnyCond statusCond = new AnyCond(AttrCond.Type.IEQ);
statusCond.setSchema("status");
statusCond.setExpression("suspended");
AnyCond changePwdDateCond = new AnyCond(AttrCond.Type.ISNULL);
changePwdDateCond.setSchema("changePwdDate");
SearchCond cond = SearchCond.and(SearchCond.negate(statusCond), SearchCond.of(changePwdDateCond));
assertTrue(cond.isValid());
List<User> users = searchDAO.search(cond, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(5, users.size());
}
@Test
public void issue202() {
ResourceCond ws2 = new ResourceCond();
ws2.setResource("ws-target-resource-2");
ResourceCond ws1 = new ResourceCond();
ws1.setResource("ws-target-resource-list-mappings-1");
SearchCond searchCondition =
SearchCond.and(SearchCond.negate(ws2), SearchCond.negate(ws1));
assertTrue(searchCondition.isValid());
List<User> users = searchDAO.search(searchCondition, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(2, users.size());
assertTrue(users.stream().anyMatch(user -> "c9b2dec2-00a7-4855-97c0-d854842b4b24".equals(user.getKey())));
}
@Test
public void issue242() {
AnyCond cond = new AnyCond(AttrCond.Type.LIKE);
cond.setSchema("key");
cond.setExpression("test%");
SearchCond searchCondition = SearchCond.of(cond);
assertTrue(searchCondition.isValid());
List<User> users = searchDAO.search(searchCondition, AnyTypeKind.USER);
assertNotNull(users);
assertTrue(users.isEmpty());
}
@Test
public void issueSYNCOPE46() {
AnyCond cond = new AnyCond(AttrCond.Type.LIKE);
cond.setSchema("username");
cond.setExpression("%ossin%");
SearchCond searchCondition = SearchCond.of(cond);
assertTrue(searchCondition.isValid());
List<User> users = searchDAO.search(searchCondition, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(1, users.size());
}
@Test
public void issueSYNCOPE433() {
AttrCond isNullCond = new AttrCond(AttrCond.Type.ISNULL);
isNullCond.setSchema("loginDate");
AnyCond likeCond = new AnyCond(AttrCond.Type.LIKE);
likeCond.setSchema("username");
likeCond.setExpression("%ossin%");
SearchCond searchCond = SearchCond.or(
SearchCond.of(isNullCond), SearchCond.of(likeCond));
long count = searchDAO.count(
realmDAO.getRoot(), true, SyncopeConstants.FULL_ADMIN_REALMS, searchCond, AnyTypeKind.USER);
assertTrue(count > 0);
}
@Test
public void issueSYNCOPE929() {
AttrCond rossiniCond = new AttrCond(AttrCond.Type.EQ);
rossiniCond.setSchema("surname");
rossiniCond.setExpression("Rossini");
AttrCond genderCond = new AttrCond(AttrCond.Type.EQ);
genderCond.setSchema("gender");
genderCond.setExpression("M");
SearchCond orCond = SearchCond.or(SearchCond.of(rossiniCond), SearchCond.of(genderCond));
AttrCond belliniCond = new AttrCond(AttrCond.Type.EQ);
belliniCond.setSchema("surname");
belliniCond.setExpression("Bellini");
SearchCond searchCond = SearchCond.and(orCond, SearchCond.of(belliniCond));
List<User> users = searchDAO.search(searchCond, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(1, users.size());
}
@Test
public void issueSYNCOPE980() {
AnyType service = entityFactory.newEntity(AnyType.class);
service.setKey("SERVICE");
service.setKind(AnyTypeKind.ANY_OBJECT);
service = anyTypeDAO.save(service);
Group citizen = groupDAO.findByName("citizen").orElseThrow();
assertNotNull(citizen);
AnyObject anyObject = entityFactory.newEntity(AnyObject.class);
anyObject.setName("one");
anyObject.setType(service);
anyObject.setRealm(realmSearchDAO.findByFullPath(SyncopeConstants.ROOT_REALM).orElseThrow());
AMembership membership = entityFactory.newEntity(AMembership.class);
membership.setRightEnd(citizen);
membership.setLeftEnd(anyObject);
anyObject.add(membership);
anyObjectDAO.save(anyObject);
anyObject = anyObjectDAO.findById("fc6dbc3a-6c07-4965-8781-921e7401a4a5").orElseThrow();
membership = entityFactory.newEntity(AMembership.class);
membership.setRightEnd(citizen);
membership.setLeftEnd(anyObject);
anyObject.add(membership);
anyObjectDAO.save(anyObject);
MembershipCond groupCond = new MembershipCond();
groupCond.setGroup("citizen");
SearchCond searchCondition = SearchCond.of(groupCond);
List<AnyObject> matching = searchDAO.search(searchCondition, AnyTypeKind.ANY_OBJECT);
assertEquals(2, matching.size());
AnyTypeCond anyTypeCond = new AnyTypeCond();
anyTypeCond.setAnyTypeKey(service.getKey());
searchCondition = SearchCond.and(SearchCond.of(groupCond), SearchCond.of(anyTypeCond));
matching = searchDAO.search(searchCondition, AnyTypeKind.ANY_OBJECT);
assertEquals(1, matching.size());
}
@Test
public void issueSYNCOPE983() {
AttrCond fullnameLeafCond = new AttrCond(AttrCond.Type.LIKE);
fullnameLeafCond.setSchema("surname");
fullnameLeafCond.setExpression("%o%");
List<Sort.Order> orderByClauses = new ArrayList<>();
orderByClauses.add(new Sort.Order(Sort.Direction.ASC, "surname"));
orderByClauses.add(new Sort.Order(Sort.Direction.DESC, "username"));
List<User> users = searchDAO.search(
realmDAO.getRoot(),
true,
SyncopeConstants.FULL_ADMIN_REALMS,
SearchCond.of(fullnameLeafCond),
Pageable.unpaged(Sort.by(orderByClauses)),
AnyTypeKind.USER);
assertFalse(users.isEmpty());
}
@Test
public void issueSYNCOPE1416() {
AttrCond idLeftCond = new AttrCond(AttrCond.Type.ISNOTNULL);
idLeftCond.setSchema("surname");
AttrCond idRightCond = new AttrCond(AttrCond.Type.ISNOTNULL);
idRightCond.setSchema("firstname");
SearchCond searchCondition = SearchCond.and(SearchCond.of(idLeftCond), SearchCond.of(idRightCond));
List<Sort.Order> orderByClauses = List.of(new Sort.Order(Sort.Direction.ASC, "ctype"));
List<User> users = searchDAO.search(searchCondition, orderByClauses, AnyTypeKind.USER);
assertEquals(searchDAO.count(
realmDAO.getRoot(), true, SyncopeConstants.FULL_ADMIN_REALMS, searchCondition, AnyTypeKind.USER),
users.size());
// search by attribute with unique constraint
AttrCond fullnameCond = new AttrCond(AttrCond.Type.ISNOTNULL);
fullnameCond.setSchema("fullname");
SearchCond cond = SearchCond.of(fullnameCond);
assertTrue(cond.isValid());
users = searchDAO.search(cond, AnyTypeKind.USER);
assertEquals(5, users.size());
fullnameCond = new AttrCond(AttrCond.Type.ISNULL);
fullnameCond.setSchema("fullname");
cond = SearchCond.of(fullnameCond);
assertTrue(cond.isValid());
users = searchDAO.search(cond, AnyTypeKind.USER);
assertTrue(users.isEmpty());
}
@Test
public void issueSYNCOPE1419() {
AttrCond loginDateCond = new AttrCond(AttrCond.Type.EQ);
loginDateCond.setSchema("loginDate");
loginDateCond.setExpression(LOGIN_DATE_VALUE);
SearchCond cond = SearchCond.negate(loginDateCond);
assertTrue(cond.isValid());
List<User> users = searchDAO.search(cond, AnyTypeKind.USER);
assertNotNull(users);
assertEquals(4, users.size());
}
}
|
apache/jena | 37,453 | jena-shacl/src/main/java/org/apache/jena/shacl/compact/reader/parser/ShaclCompactParserJJ.java | /* ShaclCompactParserJJ.java */
/* Generated By:JavaCC: Do not edit this line. ShaclCompactParserJJ.java */
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.shacl.compact.reader.parser;
import java.util.List;
import java.util.ArrayList;
import org.apache.jena.graph.*;
import org.apache.jena.sparql.path.*;
import org.apache.jena.shacl.compact.reader.*;
import static org.apache.jena.riot.lang.extra.LangParserLib.*;
public class ShaclCompactParserJJ extends ShaclCompactParser implements ShaclCompactParserJJConstants {
final public void Unit() throws ParseException {
ByteOrderMark();
shaclDoc();
jj_consume_token(0);
}
final public void ByteOrderMark() throws ParseException {
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case BOM:{
jj_consume_token(BOM);
break;
}
default:
jj_la1[0] = jj_gen;
;
}
}
final public void shaclDoc() throws ParseException {
label_1:
while (true) {
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case BASE:
case IMPORTS:
case PREFIX:{
;
break;
}
default:
jj_la1[1] = jj_gen;
break label_1;
}
directive();
}
label_2:
while (true) {
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case SHAPE_CLASS:
case SHAPE:{
;
break;
}
default:
jj_la1[2] = jj_gen;
break label_2;
}
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case SHAPE:{
nodeShape();
break;
}
case SHAPE_CLASS:{
shapeClass();
break;
}
default:
jj_la1[3] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
}
}
final public void directive() throws ParseException {
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case BASE:{
baseDecl();
break;
}
case PREFIX:{
prefixDecl();
break;
}
case IMPORTS:{
importDecl();
break;
}
default:
jj_la1[4] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
}
final public void baseDecl() throws ParseException {String iri ;
jj_consume_token(BASE);
iri = IRIREF();
rBase(iri) ;
}
final public void prefixDecl() throws ParseException {Token t ; String iri ;
jj_consume_token(PREFIX);
t = jj_consume_token(PNAME_NS);
iri = IRIREF();
String s = canonicalPrefix(t.image, t.beginLine, t.beginColumn) ;
rPrefix(s, iri);
}
final public void importDecl() throws ParseException {String iri ;
jj_consume_token(IMPORTS);
iri = iri();
rImports(iri);
}
final public void nodeShape() throws ParseException {String iri;
startNodeShape();
jj_consume_token(SHAPE);
iri = iri();
rNodeShape(iri);
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case 1:{
targetClass();
break;
}
default:
jj_la1[5] = jj_gen;
;
}
nodeShapeBody();
finishNodeShape();
}
final public void shapeClass() throws ParseException {String iri;
startShapeClass();
jj_consume_token(SHAPE_CLASS);
iri = iri();
rShapeClass(iri);
nodeShapeBody();
finishShapeClass();
}
final public void targetClass() throws ParseException {String iri;
jj_consume_token(1);
label_3:
while (true) {
iri = iri();
rTargetClass(iri);
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case IRIref:
case PNAME_NS:
case PNAME_LN:{
;
break;
}
default:
jj_la1[6] = jj_gen;
break label_3;
}
}
}
final public void nodeShapeBody() throws ParseException {
startNodeShapeBody() ;
jj_consume_token(LBRACE);
label_4:
while (true) {
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case 9:
case 10:
case 11:
case 12:
case 13:
case 14:
case 15:
case 16:
case 17:
case 18:
case 19:
case 20:
case 21:
case 22:
case 23:
case 24:
case 25:
case 26:
case 27:
case 28:
case 29:
case 30:
case 31:
case 32:
case 33:
case AT:
case CARAT:
case BANG:
case LPAREN:
case IRIref:
case PNAME_NS:
case PNAME_LN:
case ATPNAME_NS:
case ATPNAME_LN:{
;
break;
}
default:
jj_la1[7] = jj_gen;
break label_4;
}
constraint();
}
jj_consume_token(RBRACE);
finishNodeShapeBody() ;
}
final public void constraint() throws ParseException {
startConstraint();
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case 9:
case 10:
case 11:
case 12:
case 13:
case 14:
case 15:
case 16:
case 17:
case 18:
case 19:
case 20:
case 21:
case 22:
case 23:
case 24:
case 25:
case 26:
case 27:
case 28:
case 29:
case 30:
case 31:
case 32:
case 33:
case BANG:{
label_5:
while (true) {
nodeOr();
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case 9:
case 10:
case 11:
case 12:
case 13:
case 14:
case 15:
case 16:
case 17:
case 18:
case 19:
case 20:
case 21:
case 22:
case 23:
case 24:
case 25:
case 26:
case 27:
case 28:
case 29:
case 30:
case 31:
case 32:
case 33:
case BANG:{
;
break;
}
default:
jj_la1[8] = jj_gen;
break label_5;
}
}
break;
}
case CARAT:
case LPAREN:
case IRIref:
case PNAME_NS:
case PNAME_LN:{
propertyShape();
break;
}
case AT:
case ATPNAME_NS:
case ATPNAME_LN:{
shapeRef(false);
break;
}
default:
jj_la1[9] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
finishConstraint() ;
jj_consume_token(DOT);
}
final public void nodeOr() throws ParseException {
startNodeOr();
rNodeOr_pre();
nodeNot();
rNodeOr_post();
label_6:
while (true) {
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case VBAR:{
;
break;
}
default:
jj_la1[10] = jj_gen;
break label_6;
}
jj_consume_token(VBAR);
rNodeOr_pre();
nodeNot();
rNodeOr_post();
}
finishNodeOr();
}
final public void nodeNot() throws ParseException {boolean b = false;
startNodeNot();
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case BANG:{
negation();
b = true;
break;
}
default:
jj_la1[11] = jj_gen;
;
}
beginNodeNot(b);
nodeValue();
finishNodeNot(b);
}
final public void negation() throws ParseException {
jj_consume_token(BANG);
}
final public void nodeValue() throws ParseException {String s; Node n; List<Node> x;
s = nodeParam();
jj_consume_token(EQUALS);
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case TRUE:
case FALSE:
case IRIref:
case PNAME_NS:
case PNAME_LN:
case STRING_LITERAL1:
case STRING_LITERAL2:
case STRING_LITERAL_LONG1:
case STRING_LITERAL_LONG2:
case INTEGER:
case DECIMAL:
case DOUBLE:{
n = iriOrLiteral();
rNodeValue(s, n);
break;
}
case LBRACKET:{
x = array();
rNodeValue(s, x);
break;
}
default:
jj_la1[12] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
}
final public void propertyShape() throws ParseException {Path p ;
startPropertyShape();
p = path();
rPropertyShape(p);
label_7:
while (true) {
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case 3:
case 4:
case 5:
case 6:
case 7:
case 8:
case 13:
case 14:
case 15:
case 16:
case 17:
case 18:
case 19:
case 20:
case 21:
case 22:
case 23:
case 24:
case 25:
case 26:
case 27:
case 28:
case 29:
case 30:
case 31:
case 32:
case 33:
case 34:
case 35:
case 36:
case 37:
case 38:
case 39:
case 40:
case 41:
case 42:
case 43:
case 44:
case 45:
case AT:
case BANG:
case LBRACE:
case LBRACKET:
case IRIref:
case PNAME_NS:
case PNAME_LN:
case ATPNAME_NS:
case ATPNAME_LN:{
;
break;
}
default:
jj_la1[13] = jj_gen;
break label_7;
}
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case LBRACKET:{
propertyCount();
break;
}
case 3:
case 4:
case 5:
case 6:
case 7:
case 8:
case 13:
case 14:
case 15:
case 16:
case 17:
case 18:
case 19:
case 20:
case 21:
case 22:
case 23:
case 24:
case 25:
case 26:
case 27:
case 28:
case 29:
case 30:
case 31:
case 32:
case 33:
case 34:
case 35:
case 36:
case 37:
case 38:
case 39:
case 40:
case 41:
case 42:
case 43:
case 44:
case 45:
case AT:
case BANG:
case LBRACE:
case IRIref:
case PNAME_NS:
case PNAME_LN:
case ATPNAME_NS:
case ATPNAME_LN:{
propertyOr();
break;
}
default:
jj_la1[14] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
}
finishPropertyShape();
}
final public void propertyOr() throws ParseException {
startPropertyOr();
rPropertyOr_pre();
propertyNot();
rPropertyOr_post();
label_8:
while (true) {
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case VBAR:{
;
break;
}
default:
jj_la1[15] = jj_gen;
break label_8;
}
jj_consume_token(VBAR);
rPropertyOr_pre();
propertyNot();
rPropertyOr_post();
}
finishPropertyOr();
}
final public void propertyNot() throws ParseException {boolean b = false;
startPropertyNot();
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case BANG:{
negation();
b = true;
break;
}
default:
jj_la1[16] = jj_gen;
;
}
beginPropertyNot(b);
propertyAtom();
finishPropertyNot(b);
}
final public void propertyAtom() throws ParseException {
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case IRIref:
case PNAME_NS:
case PNAME_LN:{
propertyType();
break;
}
case 3:
case 4:
case 5:
case 6:
case 7:
case 8:{
nodeKind();
break;
}
case AT:
case ATPNAME_NS:
case ATPNAME_LN:{
shapeRef(true);
break;
}
case 13:
case 14:
case 15:
case 16:
case 17:
case 18:
case 19:
case 20:
case 21:
case 22:
case 23:
case 24:
case 25:
case 26:
case 27:
case 28:
case 29:
case 30:
case 31:
case 32:
case 33:
case 34:
case 35:
case 36:
case 37:
case 38:
case 39:
case 40:
case 41:
case 42:
case 43:
case 44:
case 45:{
propertyValue();
break;
}
case LBRACE:{
startNestedPropertyAtom();
nodeShapeBody();
finishNestedPropertyAtom();
break;
}
default:
jj_la1[17] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
}
final public void propertyCount() throws ParseException {String s1, s2;
jj_consume_token(LBRACKET);
s1 = propertyMinCount();
jj_consume_token(2);
s2 = propertyMaxCount();
jj_consume_token(RBRACKET);
rPropertyCount(s1, s2);
}
final public String propertyMinCount() throws ParseException {Token t;
t = jj_consume_token(INTEGER);
{if ("" != null) return t.image;}
throw new Error("Missing return statement in function");
}
final public String propertyMaxCount() throws ParseException {Token t;
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case INTEGER:{
t = jj_consume_token(INTEGER);
break;
}
case STAR:{
t = jj_consume_token(STAR);
break;
}
default:
jj_la1[18] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
{if ("" != null) return t.image;}
throw new Error("Missing return statement in function");
}
final public void propertyType() throws ParseException {String iriStr;
iriStr = iri();
rPropertyType(iriStr);
}
final public void nodeKind() throws ParseException {Token t;
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case 3:{
t = jj_consume_token(3);
break;
}
case 4:{
t = jj_consume_token(4);
break;
}
case 5:{
t = jj_consume_token(5);
break;
}
case 6:{
t = jj_consume_token(6);
break;
}
case 7:{
t = jj_consume_token(7);
break;
}
case 8:{
t = jj_consume_token(8);
break;
}
default:
jj_la1[19] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
rNodeKind(t.image);
}
final public void shapeRef(boolean inPropertyShape) throws ParseException {Token t; String iriStr;
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case ATPNAME_LN:{
t = jj_consume_token(ATPNAME_LN);
iriStr = resolvePName(t.image.substring(1), t.beginLine, t.beginColumn) ;
break;
}
case ATPNAME_NS:{
t = jj_consume_token(ATPNAME_NS);
iriStr = resolvePName(t.image.substring(1), t.beginLine, t.beginColumn) ;
break;
}
case AT:{
jj_consume_token(AT);
iriStr = IRIREF();
break;
}
default:
jj_la1[20] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
rShapeRef(inPropertyShape, iriStr);
}
final public void propertyValue() throws ParseException {String s; Node n; List<Node> x;
s = propertyParam();
jj_consume_token(EQUALS);
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case TRUE:
case FALSE:
case IRIref:
case PNAME_NS:
case PNAME_LN:
case STRING_LITERAL1:
case STRING_LITERAL2:
case STRING_LITERAL_LONG1:
case STRING_LITERAL_LONG2:
case INTEGER:
case DECIMAL:
case DOUBLE:{
n = iriOrLiteral();
rParamValue(s, n);
break;
}
case LBRACKET:{
x = array();
rParamValue(s, x);
break;
}
default:
jj_la1[21] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
}
// Assemble items to build with from hereon down.
// Return Java objects.
final public
String nodeParam() throws ParseException {Token t ;
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case 9:{
t = jj_consume_token(9);
break;
}
case 10:{
t = jj_consume_token(10);
break;
}
case 11:{
t = jj_consume_token(11);
break;
}
case 12:{
// <EXT>
// Extension for symmetry!
t = jj_consume_token(12);
break;
}
case 13:{
// </EXT>
t = jj_consume_token(13);
break;
}
case 14:{
t = jj_consume_token(14);
break;
}
case 15:{
t = jj_consume_token(15);
break;
}
case 16:{
t = jj_consume_token(16);
break;
}
case 17:{
t = jj_consume_token(17);
break;
}
case 18:{
t = jj_consume_token(18);
break;
}
case 19:{
t = jj_consume_token(19);
break;
}
case 20:{
t = jj_consume_token(20);
break;
}
case 21:{
t = jj_consume_token(21);
break;
}
case 22:{
t = jj_consume_token(22);
break;
}
case 23:{
t = jj_consume_token(23);
break;
}
case 24:{
t = jj_consume_token(24);
break;
}
case 25:{
t = jj_consume_token(25);
break;
}
case 26:{
t = jj_consume_token(26);
break;
}
case 27:{
t = jj_consume_token(27);
break;
}
case 28:{
t = jj_consume_token(28);
break;
}
case 29:{
t = jj_consume_token(29);
break;
}
case 30:{
t = jj_consume_token(30);
break;
}
case 31:{
t = jj_consume_token(31);
break;
}
case 32:{
t = jj_consume_token(32);
break;
}
case 33:{
t = jj_consume_token(33);
break;
}
default:
jj_la1[22] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
{if ("" != null) return t.image ;}
throw new Error("Missing return statement in function");
}
final public String propertyParam() throws ParseException {Token t;
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case 13:{
t = jj_consume_token(13);
break;
}
case 14:{
t = jj_consume_token(14);
break;
}
case 15:{
t = jj_consume_token(15);
break;
}
case 16:{
t = jj_consume_token(16);
break;
}
case 17:{
t = jj_consume_token(17);
break;
}
case 18:{
t = jj_consume_token(18);
break;
}
case 19:{
t = jj_consume_token(19);
break;
}
case 20:{
t = jj_consume_token(20);
break;
}
case 21:{
t = jj_consume_token(21);
break;
}
case 22:{
t = jj_consume_token(22);
break;
}
case 23:{
t = jj_consume_token(23);
break;
}
case 24:{
t = jj_consume_token(24);
break;
}
case 25:{
t = jj_consume_token(25);
break;
}
case 26:{
t = jj_consume_token(26);
break;
}
case 27:{
t = jj_consume_token(27);
break;
}
case 34:{
t = jj_consume_token(34);
break;
}
case 28:{
t = jj_consume_token(28);
break;
}
case 29:{
t = jj_consume_token(29);
break;
}
case 35:{
t = jj_consume_token(35);
break;
}
case 36:{
t = jj_consume_token(36);
break;
}
case 37:{
t = jj_consume_token(37);
break;
}
case 38:{
t = jj_consume_token(38);
break;
}
case 39:{
t = jj_consume_token(39);
break;
}
case 40:{
t = jj_consume_token(40);
break;
}
case 30:{
t = jj_consume_token(30);
break;
}
case 31:{
t = jj_consume_token(31);
break;
}
case 32:{
t = jj_consume_token(32);
break;
}
case 33:{
t = jj_consume_token(33);
break;
}
case 41:{
t = jj_consume_token(41);
break;
}
case 42:{
t = jj_consume_token(42);
break;
}
case 43:{
t = jj_consume_token(43);
break;
}
case 44:{
t = jj_consume_token(44);
break;
}
case 45:{
t = jj_consume_token(45);
break;
}
default:
jj_la1[23] = jj_gen;
jj_consume_token(-1);
throw new ParseException();
}
{if ("" != null) return t.image;}
throw new Error("Missing return statement in function");
}
// Paths - subset of SPARQL Paths - no negation, no path property sets.
final public
Path PathUnit() throws ParseException {Path p ;
ByteOrderMark();
p = path();
jj_consume_token(0);
{if ("" != null) return p ;}
throw new Error("Missing return statement in function");
}
// Weakest outermost
final public Path path() throws ParseException {Path p ;
p = pathAlternative();
{if ("" != null) return p ;}
throw new Error("Missing return statement in function");
}
final public Path pathAlternative() throws ParseException {Path p1 , p2 ;
p1 = pathSequence();
label_9:
while (true) {
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case VBAR:{
;
break;
}
default:
jj_la1[24] = jj_gen;
break label_9;
}
jj_consume_token(VBAR);
p2 = pathSequence();
p1 = PathFactory.pathAlt(p1, p2) ;
}
{if ("" != null) return p1 ;}
throw new Error("Missing return statement in function");
}
final public Path pathSequence() throws ParseException {Path p1 , p2 ;
p1 = pathEltOrInverse();
label_10:
while (true) {
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case SLASH:{
;
break;
}
default:
jj_la1[25] = jj_gen;
break label_10;
}
jj_consume_token(SLASH);
p2 = pathEltOrInverse();
p1 = PathFactory.pathSeq(p1, p2) ;
}
{if ("" != null) return p1;}
throw new Error("Missing return statement in function");
}
// Path unit element, no inverse
final public Path pathElt() throws ParseException {String str ; Node n ; Path p ;
p = pathPrimary();
switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
case PLUS:
case QMARK:
case STAR:{
p = pathMod(p);
break;
}
default:
jj_la1[26] = jj_gen;
;
}
{if ("" != null) return p ;}
throw new Error("Missing return statement in function");
}
  // Path unit element, including inverse.
  // pathEltOrInverse := pathElt | '^' pathElt
  final public Path pathEltOrInverse() throws ParseException {String str ; Node n ; Path p ;
    switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
    case LPAREN:
    case IRIref:
    case PNAME_NS:
    case PNAME_LN:{
      p = pathElt();
      break;
      }
    case CARAT:{
      // '^' reverses the direction of the following path element.
      jj_consume_token(CARAT);
      p = pathElt();
p = PathFactory.pathInverse(p) ;
      break;
      }
    default:
      jj_la1[27] = jj_gen;
      jj_consume_token(-1);
      throw new ParseException();
    }
{if ("" != null) return p ;}
    throw new Error("Missing return statement in function");
  }
  // pathMod := '?' (zero-or-one) | '*' (zero-or-more) | '+' (one-or-more)
  // Wraps the already-parsed path p in the matching cardinality node.
  final public Path pathMod(Path p) throws ParseException {long i1 ; long i2 ;
    switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
    case QMARK:{
      jj_consume_token(QMARK);
{if ("" != null) return PathFactory.pathZeroOrOne(p) ;}
      break;
      }
    case STAR:{
      jj_consume_token(STAR);
{if ("" != null) return PathFactory.pathZeroOrMore1(p) ;}
      break;
      }
    case PLUS:{
      jj_consume_token(PLUS);
{if ("" != null) return PathFactory.pathOneOrMore1(p) ;}
      break;
      }
    default:
      jj_la1[28] = jj_gen;
      jj_consume_token(-1);
      throw new ParseException();
    }
    throw new Error("Missing return statement in function");
  }
  // pathPrimary := iri | '(' path ')'
  final public Path pathPrimary() throws ParseException {String str ; Path p ; Node n ;
    switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
    case IRIref:
    case PNAME_NS:
    case PNAME_LN:{
      str = iri();
// A bare IRI is a single-link path; position info comes from the last token.
n = createURI(str, token.beginLine, token.beginColumn) ; p = PathFactory.pathLink(n) ;
      break;
      }
    case LPAREN:{
      // Parenthesised sub-path: grouping only, no extra structure is added.
      jj_consume_token(LPAREN);
      p = path();
      jj_consume_token(RPAREN);
      break;
      }
    default:
      jj_la1[29] = jj_gen;
      jj_consume_token(-1);
      throw new ParseException();
    }
{if ("" != null) return p ;}
    throw new Error("Missing return statement in function");
  }
  // To preserve types, use ( iriOrLiteral() | array() ) directly
  // void iriOrLiteralOrArray() : {}
  // {
  //   (
  //     { Node n = null; }
  //     n = iriOrLiteral()
  //     { iriOrLiteralOrArray(n); }
  //   |
  //     { List<Node> x = null; }
  //     x = array()
  //     { iriOrLiteralOrArray(x); }
  //   )
  // }
  // array := '[' iriOrLiteral* ']' — a possibly-empty list of term nodes.
  final public
List<Node> array() throws ParseException {List<Node> x = new ArrayList<Node>(); Node n = null;
    jj_consume_token(LBRACKET);
    label_11:
    while (true) {
      // Continue while the lookahead token can start another IRI or literal.
      switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
      case TRUE:
      case FALSE:
      case IRIref:
      case PNAME_NS:
      case PNAME_LN:
      case STRING_LITERAL1:
      case STRING_LITERAL2:
      case STRING_LITERAL_LONG1:
      case STRING_LITERAL_LONG2:
      case INTEGER:
      case DECIMAL:
      case DOUBLE:{
        ;
        break;
      }
      default:
        jj_la1[30] = jj_gen;
        break label_11;
      }
      n = iriOrLiteral();
x.add(n);
    }
    jj_consume_token(RBRACKET);
{if ("" != null) return x;}
    throw new Error("Missing return statement in function");
  }
  // Term generation
  // iriOrLiteral := iri | literal — builds a concrete Node for either form.
  final public
Node iriOrLiteral() throws ParseException {Node n; String uriStr;
    switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
    case IRIref:
    case PNAME_NS:
    case PNAME_LN:{
      uriStr = iri();
n = createURI(uriStr, token.beginLine, token.beginColumn);
      break;
      }
    case TRUE:
    case FALSE:
    case STRING_LITERAL1:
    case STRING_LITERAL2:
    case STRING_LITERAL_LONG1:
    case STRING_LITERAL_LONG2:
    case INTEGER:
    case DECIMAL:
    case DOUBLE:{
      n = literal();
      break;
      }
    default:
      jj_la1[31] = jj_gen;
      jj_consume_token(-1);
      throw new ParseException();
    }
{if ("" != null) return n ;}
    throw new Error("Missing return statement in function");
  }
  // literal := rdfLiteral | numericLiteral | booleanLiteral
  // Dispatches on the lookahead token kind to the specific literal rule.
  final public Node literal() throws ParseException {Node n ;
    switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
    case STRING_LITERAL1:
    case STRING_LITERAL2:
    case STRING_LITERAL_LONG1:
    case STRING_LITERAL_LONG2:{
      n = rdfLiteral();
      break;
      }
    case INTEGER:
    case DECIMAL:
    case DOUBLE:{
      n = numericLiteral();
      break;
      }
    case TRUE:
    case FALSE:{
      n = booleanLiteral();
      break;
      }
    default:
      jj_la1[32] = jj_gen;
      jj_consume_token(-1);
      throw new ParseException();
    }
{if ("" != null) return n;}
    throw new Error("Missing return statement in function");
  }
  // booleanLiteral := 'true' | 'false' — mapped to the shared XSD constant nodes.
  final public Node booleanLiteral() throws ParseException {
    switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
    case TRUE:{
      jj_consume_token(TRUE);
{if ("" != null) return XSD_TRUE ;}
      break;
      }
    case FALSE:{
      jj_consume_token(FALSE);
{if ("" != null) return XSD_FALSE ;}
      break;
      }
    default:
      jj_la1[33] = jj_gen;
      jj_consume_token(-1);
      throw new ParseException();
    }
    throw new Error("Missing return statement in function");
  }
  // numericLiteral := INTEGER | DECIMAL | DOUBLE.
  // The raw token image is handed to the factory; positions are for error reporting.
  final public Node numericLiteral() throws ParseException {Token t ;
    switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
    case INTEGER:{
      t = jj_consume_token(INTEGER);
{if ("" != null) return createLiteralInteger(t.image, token.beginLine, token.beginColumn) ;}
      break;
      }
    case DECIMAL:{
      t = jj_consume_token(DECIMAL);
{if ("" != null) return createLiteralDecimal(t.image, token.beginLine, token.beginColumn) ;}
      break;
      }
    case DOUBLE:{
      t = jj_consume_token(DOUBLE);
{if ("" != null) return createLiteralDouble(t.image, token.beginLine, token.beginColumn) ;}
      break;
      }
    default:
      jj_la1[34] = jj_gen;
      jj_consume_token(-1);
      throw new ParseException();
    }
    throw new Error("Missing return statement in function");
  }
  // rdfLiteral := string ( LANGTAG | '^^' datatype )?
  // At most one of lang/dt ends up non-null; both null means a plain string.
  final public Node rdfLiteral() throws ParseException {Token t ; Token tLang; String lex = null ;
    lex = string();
// Remember the string's token so error positions point at the lexical form.
t = token;
String lang = null ; String dt = null ;
    switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
    case 46:
    case LANGTAG:{
      switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
      case LANGTAG:{
        tLang = jj_consume_token(LANGTAG);
// Drop the leading '@' of the language tag.
lang = stripChars(tLang.image, 1) ;
        break;
      }
      case 46:{
        // Token kind 46 is the '^^' datatype marker.
        jj_consume_token(46);
        dt = datatype();
        break;
      }
      default:
        jj_la1[35] = jj_gen;
        jj_consume_token(-1);
        throw new ParseException();
      }
      break;
      }
    default:
      jj_la1[36] = jj_gen;
      ;
    }
{if ("" != null) return createLiteral(lex, lang, dt, t.beginLine, t.beginColumn) ;}
    throw new Error("Missing return statement in function");
  }
final public String datatype() throws ParseException {String s;
s = iri();
{if ("" != null) return s;}
throw new Error("Missing return statement in function");
}
  // string := any of the four quoting forms; quotes stripped, escapes decoded.
  final public String string() throws ParseException {Token t ; String lex ;
    switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
    case STRING_LITERAL1:{
      t = jj_consume_token(STRING_LITERAL1);
lex = stripQuotes(t.image) ;
      break;
      }
    case STRING_LITERAL2:{
      t = jj_consume_token(STRING_LITERAL2);
lex = stripQuotes(t.image) ;
      break;
      }
    case STRING_LITERAL_LONG1:{
      t = jj_consume_token(STRING_LITERAL_LONG1);
// Long forms are triple-quoted, so three quote characters are stripped per side.
lex = stripQuotes3(t.image) ;
      break;
      }
    case STRING_LITERAL_LONG2:{
      t = jj_consume_token(STRING_LITERAL_LONG2);
lex = stripQuotes3(t.image) ;
      break;
      }
    default:
      jj_la1[37] = jj_gen;
      jj_consume_token(-1);
      throw new ParseException();
    }
// Decode escape sequences; token position is passed for error reporting.
lex = unescapeStr(lex, t.beginLine, t.beginColumn) ;
{if ("" != null) return lex ;}
    throw new Error("Missing return statement in function");
  }
  // iri := full <...> IRI reference or a prefixed name, both resolved to an
  // absolute IRI string.
  final public String iri() throws ParseException {String iri = null;
    switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
    case IRIref:{
      iri = IRIREF();
{if ("" != null) return iri ;}
      break;
      }
    case PNAME_NS:
    case PNAME_LN:{
      iri = PrefixedName();
{if ("" != null) return iri ;}
      break;
      }
    default:
      jj_la1[38] = jj_gen;
      jj_consume_token(-1);
      throw new ParseException();
    }
    throw new Error("Missing return statement in function");
  }
  // PrefixedName := PNAME_LN ("pfx:local") | PNAME_NS ("pfx:") — expanded
  // against the declared prefixes by resolvePName.
  final public String PrefixedName() throws ParseException {Token t ;
    switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) {
    case PNAME_LN:{
      t = jj_consume_token(PNAME_LN);
{if ("" != null) return resolvePName(t.image, t.beginLine, t.beginColumn) ;}
      break;
      }
    case PNAME_NS:{
      t = jj_consume_token(PNAME_NS);
{if ("" != null) return resolvePName(t.image, t.beginLine, t.beginColumn) ;}
      break;
      }
    default:
      jj_la1[39] = jj_gen;
      jj_consume_token(-1);
      throw new ParseException();
    }
    throw new Error("Missing return statement in function");
  }
final public String IRIREF() throws ParseException {Token t ;
t = jj_consume_token(IRIref);
{if ("" != null) return resolveQuotedIRI(t.image, t.beginLine, t.beginColumn) ;}
throw new Error("Missing return statement in function");
}
  /** Generated Token Manager. */
  public ShaclCompactParserJJTokenManager token_source;
  SimpleCharStream jj_input_stream;
  /** Current token. */
  public Token token;
  /** Next token. */
  public Token jj_nt;
  // Cached kind of the lookahead token; -1 means "not yet fetched" (see jj_ntk_f()).
  private int jj_ntk;
  // Generation counter, bumped on every consumed token; used for error reporting.
  private int jj_gen;
  // Per-choice-point record of the jj_gen value at the last LA(1) decision.
  final private int[] jj_la1 = new int[40];
  // Expected-token bitmask tables: jj_la1_<w> holds bits for token kinds 32*w..32*w+31.
  static private int[] jj_la1_0;
  static private int[] jj_la1_1;
  static private int[] jj_la1_2;
  static private int[] jj_la1_3;
  static {
    jj_la1_init_0();
    jj_la1_init_1();
    jj_la1_init_2();
    jj_la1_init_3();
  }
  private static void jj_la1_init_0() {
    jj_la1_0 = new int[] {0x0,0x0,0x0,0x0,0x0,0x2,0x0,0xfffffe00,0xfffffe00,0xfffffe00,0x0,0x0,0x0,0xffffe1f8,0xffffe1f8,0x0,0x0,0xffffe1f8,0x0,0x1f8,0x0,0x0,0xfffffe00,0xffffe000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,};
  }
  private static void jj_la1_init_1() {
    jj_la1_1 = new int[] {0x10000000,0xe00000,0x3000000,0x3000000,0xe00000,0x0,0x0,0x3,0x3,0x3,0x0,0x0,0xc000000,0x3fff,0x3fff,0x0,0x0,0x3fff,0x0,0x0,0x0,0xc000000,0x3,0x3fff,0x0,0x0,0x40000000,0x0,0x40000000,0x0,0xc000000,0xc000000,0xc000000,0xc000000,0x0,0x4000,0x4000,0x0,0x0,0x0,};
  }
  private static void jj_la1_init_2() {
    jj_la1_2 = new int[] {0x0,0x0,0x0,0x0,0x0,0x0,0x38000,0xf8216,0x10,0xf8216,0x1,0x10,0xbc03a000,0xfa812,0xfa812,0x1,0x10,0xf8802,0x80000080,0x0,0xc0002,0xbc03a000,0x0,0x0,0x1,0x40,0xa0,0x38204,0xa0,0x38200,0xbc038000,0xbc038000,0xbc000000,0x0,0x80000000,0x0,0x0,0x3c000000,0x38000,0x30000,};
  }
  private static void jj_la1_init_3() {
    jj_la1_3 = new int[] {0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3,0x3,0x3,0x0,0x3,0x8,0x8,0x0,0x0,0x0,};
  }
  /** Constructor with InputStream. */
  public ShaclCompactParserJJ(java.io.InputStream stream) {
    this(stream, null);
  }
  /** Constructor with InputStream and supplied encoding */
  public ShaclCompactParserJJ(java.io.InputStream stream, String encoding) {
    // SimpleCharStream only throws UnsupportedEncodingException; surface it unchecked.
    try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
    token_source = new ShaclCompactParserJJTokenManager(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    // -1 marks "no LA(1) decision recorded yet" for every choice point.
    for (int i = 0; i < 40; i++) jj_la1[i] = -1;
  }
  /** Reinitialise. */
  public void ReInit(java.io.InputStream stream) {
    ReInit(stream, null);
  }
  /** Reinitialise. */
  public void ReInit(java.io.InputStream stream, String encoding) {
    try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
    token_source.ReInit(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 40; i++) jj_la1[i] = -1;
  }
  /** Constructor. */
  public ShaclCompactParserJJ(java.io.Reader stream) {
    jj_input_stream = new SimpleCharStream(stream, 1, 1);
    token_source = new ShaclCompactParserJJTokenManager(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 40; i++) jj_la1[i] = -1;
  }
  /** Reinitialise. */
  public void ReInit(java.io.Reader stream) {
    // Lazily create the stream/token-manager on first use, then reset them.
    if (jj_input_stream == null) {
      jj_input_stream = new SimpleCharStream(stream, 1, 1);
    } else {
      jj_input_stream.ReInit(stream, 1, 1);
    }
    if (token_source == null) {
      token_source = new ShaclCompactParserJJTokenManager(jj_input_stream);
    }
    token_source.ReInit(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 40; i++) jj_la1[i] = -1;
  }
  /** Constructor with generated Token Manager. */
  public ShaclCompactParserJJ(ShaclCompactParserJJTokenManager tm) {
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 40; i++) jj_la1[i] = -1;
  }
  /** Reinitialise. */
  public void ReInit(ShaclCompactParserJJTokenManager tm) {
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 40; i++) jj_la1[i] = -1;
  }
  // Consume the next token, requiring it to be of the given kind.  On mismatch
  // the previous position is restored and a ParseException listing the
  // expected tokens is thrown.
  private Token jj_consume_token(int kind) throws ParseException {
    Token oldToken;
    if ((oldToken = token).next != null) token = token.next;
    else token = token.next = token_source.getNextToken();
    jj_ntk = -1;
    if (token.kind == kind) {
      jj_gen++;
      return token;
    }
    token = oldToken;
    // Record the expected kind for generateParseException().
    jj_kind = kind;
    throw generateParseException();
  }
/** Get the next Token. */
final public Token getNextToken() {
if (token.next != null) token = token.next;
else token = token.next = token_source.getNextToken();
jj_ntk = -1;
jj_gen++;
return token;
}
/** Get the specific Token. */
final public Token getToken(int index) {
Token t = token;
for (int i = 0; i < index; i++) {
if (t.next != null) t = t.next;
else t = t.next = token_source.getNextToken();
}
return t;
}
  // Fetch (and cache in jj_ntk) the kind of the lookahead token, pulling a new
  // token from the token manager only when none is buffered yet.
  private int jj_ntk_f() {
    if ((jj_nt=token.next) == null)
      return (jj_ntk = (token.next=token_source.getNextToken()).kind);
    else
      return (jj_ntk = jj_nt.kind);
  }
  private java.util.List<int[]> jj_expentries = new java.util.ArrayList<int[]>();
  private int[] jj_expentry;
  // Expected token kind recorded by jj_consume_token on mismatch; -1 = none.
  private int jj_kind = -1;
  /** Generate ParseException. */
  public ParseException generateParseException() {
    jj_expentries.clear();
    // One flag per token kind (this grammar has 113 kinds).
    boolean[] la1tokens = new boolean[113];
    if (jj_kind >= 0) {
      la1tokens[jj_kind] = true;
      jj_kind = -1;
    }
    // Merge the expected-token masks of every choice point whose last LA(1)
    // decision happened at the current input position (same jj_gen).
    for (int i = 0; i < 40; i++) {
      if (jj_la1[i] == jj_gen) {
        for (int j = 0; j < 32; j++) {
          if ((jj_la1_0[i] & (1<<j)) != 0) {
            la1tokens[j] = true;
          }
          if ((jj_la1_1[i] & (1<<j)) != 0) {
            la1tokens[32+j] = true;
          }
          if ((jj_la1_2[i] & (1<<j)) != 0) {
            la1tokens[64+j] = true;
          }
          if ((jj_la1_3[i] & (1<<j)) != 0) {
            la1tokens[96+j] = true;
          }
        }
      }
    }
    // Each expected token becomes a one-element sequence for ParseException.
    for (int i = 0; i < 113; i++) {
      if (la1tokens[i]) {
        jj_expentry = new int[1];
        jj_expentry[0] = i;
        jj_expentries.add(jj_expentry);
      }
    }
    int[][] exptokseq = new int[jj_expentries.size()][];
    for (int i = 0; i < jj_expentries.size(); i++) {
      exptokseq[i] = jj_expentries.get(i);
    }
    return new ParseException(token, exptokseq, tokenImage);
  }
  private boolean trace_enabled;
  /** Trace enabled. */
  final public boolean trace_enabled() {
    return trace_enabled;
  }
  /** Enable tracing. Empty body: the grammar was generated without DEBUG_PARSER, so tracing is compiled out. */
  final public void enable_tracing() {
  }
  /** Disable tracing. No-op for the same reason as enable_tracing(). */
  final public void disable_tracing() {
  }
}
|
googleapis/google-cloud-java | 38,191 | java-networkservices/proto-google-cloud-networkservices-v1/src/main/java/com/google/cloud/networkservices/v1/UpdateEndpointPolicyRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkservices/v1/endpoint_policy.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkservices.v1;
/**
*
*
* <pre>
* Request used with the UpdateEndpointPolicy method.
* </pre>
*
* Protobuf type {@code google.cloud.networkservices.v1.UpdateEndpointPolicyRequest}
*/
public final class UpdateEndpointPolicyRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.networkservices.v1.UpdateEndpointPolicyRequest)
UpdateEndpointPolicyRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use UpdateEndpointPolicyRequest.newBuilder() to construct.
  private UpdateEndpointPolicyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor: used only for the shared default (all-fields-unset) instance.
  private UpdateEndpointPolicyRequest() {}

  // Reflection hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateEndpointPolicyRequest();
  }
  /** Returns the message-type descriptor for {@code UpdateEndpointPolicyRequest}. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.networkservices.v1.EndpointPolicyProto
        .internal_static_google_cloud_networkservices_v1_UpdateEndpointPolicyRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.networkservices.v1.EndpointPolicyProto
        .internal_static_google_cloud_networkservices_v1_UpdateEndpointPolicyRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest.class,
            com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest.Builder.class);
  }
  // Presence bits: bit 0 = update_mask set, bit 1 = endpoint_policy set.
  private int bitField0_;
  public static final int UPDATE_MASK_FIELD_NUMBER = 1;
  private com.google.protobuf.FieldMask updateMask_;

  /**
   * Optional. Field mask selecting which EndpointPolicy fields the update
   * overwrites; paths are relative to the resource, not the request. If no
   * mask is provided, all fields are overwritten.
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   * Optional. Field mask selecting which EndpointPolicy fields the update
   * overwrites. Returns the default (empty) FieldMask when unset.
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  /**
   * Optional. Field mask selecting which EndpointPolicy fields the update
   * overwrites. OrBuilder view of the same value as {@link #getUpdateMask()}.
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  public static final int ENDPOINT_POLICY_FIELD_NUMBER = 2;
  private com.google.cloud.networkservices.v1.EndpointPolicy endpointPolicy_;

  /**
   * Required. Updated EndpointPolicy resource.
   *
   * <code>
   * .google.cloud.networkservices.v1.EndpointPolicy endpoint_policy = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the endpointPolicy field is set.
   */
  @java.lang.Override
  public boolean hasEndpointPolicy() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   * Required. Updated EndpointPolicy resource.
   * Returns the default instance when unset.
   *
   * <code>
   * .google.cloud.networkservices.v1.EndpointPolicy endpoint_policy = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The endpointPolicy.
   */
  @java.lang.Override
  public com.google.cloud.networkservices.v1.EndpointPolicy getEndpointPolicy() {
    return endpointPolicy_ == null
        ? com.google.cloud.networkservices.v1.EndpointPolicy.getDefaultInstance()
        : endpointPolicy_;
  }

  /**
   * Required. Updated EndpointPolicy resource.
   * OrBuilder view of the same value as {@link #getEndpointPolicy()}.
   *
   * <code>
   * .google.cloud.networkservices.v1.EndpointPolicy endpoint_policy = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.networkservices.v1.EndpointPolicyOrBuilder getEndpointPolicyOrBuilder() {
    return endpointPolicy_ == null
        ? com.google.cloud.networkservices.v1.EndpointPolicy.getDefaultInstance()
        : endpointPolicy_;
  }
  // Memoized isInitialized result: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No proto2-required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Only fields whose presence bit is set are written.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getEndpointPolicy());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the wire size; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getEndpointPolicy());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest other =
        (com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest) obj;

    // Field presence must match before values are compared.
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (hasEndpointPolicy() != other.hasEndpointPolicy()) return false;
    if (hasEndpointPolicy()) {
      if (!getEndpointPolicy().equals(other.getEndpointPolicy())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 doubles as "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    if (hasEndpointPolicy()) {
      hash = (37 * hash) + ENDPOINT_POLICY_FIELD_NUMBER;
      hash = (53 * hash) + getEndpointPolicy().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an ExtensionRegistryLite.
  public static com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Returns a new builder seeded from the default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a new builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(
      com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; otherwise copy this message in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request used with the UpdateEndpointPolicy method.
* </pre>
*
* Protobuf type {@code google.cloud.networkservices.v1.UpdateEndpointPolicyRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.networkservices.v1.UpdateEndpointPolicyRequest)
com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequestOrBuilder {
    /** Returns the descriptor shared with the enclosing message type. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.networkservices.v1.EndpointPolicyProto
          .internal_static_google_cloud_networkservices_v1_UpdateEndpointPolicyRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.networkservices.v1.EndpointPolicyProto
          .internal_static_google_cloud_networkservices_v1_UpdateEndpointPolicyRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest.class,
              com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest.Builder.class);
    }
    // Construct using com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly create nested builders only when the runtime requires it.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getUpdateMaskFieldBuilder();
        getEndpointPolicyFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset presence bits and release both nested-message builders.
      bitField0_ = 0;
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      endpointPolicy_ = null;
      if (endpointPolicyBuilder_ != null) {
        endpointPolicyBuilder_.dispose();
        endpointPolicyBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.networkservices.v1.EndpointPolicyProto
          .internal_static_google_cloud_networkservices_v1_UpdateEndpointPolicyRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest
        getDefaultInstanceForType() {
      return com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest build() {
      com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest buildPartial() {
      com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest result =
          new com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies each set field from the builder into the result message and
    // accumulates the matching presence bits.
    private void buildPartial0(
        com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.endpointPolicy_ =
            endpointPolicyBuilder_ == null ? endpointPolicy_ : endpointPolicyBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // The following overrides simply delegate to the GeneratedMessageV3.Builder
    // base class; they are regenerated here so the return type is this Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Use the typed merge when possible; fall back to the reflective merge.
      if (other instanceof com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest) {
        return mergeFrom((com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-by-field merge: only fields set on `other` overwrite/merge into this builder.
    public Builder mergeFrom(
        com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest other) {
      if (other
          == com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest.getDefaultInstance())
        return this;
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      if (other.hasEndpointPolicy()) {
        mergeEndpointPolicy(other.getEndpointPolicy());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge: reads tag/value pairs until EOF (tag 0) or an end-group tag.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (update_mask), wire type 2 (length-delimited message).
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2 (endpoint_policy), wire type 2 (length-delimited message).
                input.readMessage(getEndpointPolicyFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder-side presence bits, mirroring the message's bitField0_ layout.
    private int bitField0_;

    private com.google.protobuf.FieldMask updateMask_;
    // Lazily-created nested builder; when non-null it owns the current value.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;

    /**
     * Optional. Field mask selecting which EndpointPolicy fields the update
     * overwrites; paths are relative to the resource, not the request. If no
     * mask is provided, all fields are overwritten.
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     * Optional. Field mask selecting which EndpointPolicy fields the update
     * overwrites. Returns the default (empty) FieldMask when unset.
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* EndpointPolicy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* EndpointPolicy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* EndpointPolicy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* EndpointPolicy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000001);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* EndpointPolicy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* EndpointPolicy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Optional. Field mask is used to specify the fields to be overwritten in the
* EndpointPolicy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
    private com.google.cloud.networkservices.v1.EndpointPolicy endpointPolicy_;
    // Lazily created nested builder; once non-null it (not endpointPolicy_)
    // owns the field's current value.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.networkservices.v1.EndpointPolicy,
            com.google.cloud.networkservices.v1.EndpointPolicy.Builder,
            com.google.cloud.networkservices.v1.EndpointPolicyOrBuilder>
        endpointPolicyBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Updated EndpointPolicy resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.EndpointPolicy endpoint_policy = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the endpointPolicy field is set.
     */
    public boolean hasEndpointPolicy() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Updated EndpointPolicy resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.EndpointPolicy endpoint_policy = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The endpointPolicy.
     */
    public com.google.cloud.networkservices.v1.EndpointPolicy getEndpointPolicy() {
      if (endpointPolicyBuilder_ == null) {
        return endpointPolicy_ == null
            ? com.google.cloud.networkservices.v1.EndpointPolicy.getDefaultInstance()
            : endpointPolicy_;
      } else {
        return endpointPolicyBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Updated EndpointPolicy resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.EndpointPolicy endpoint_policy = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setEndpointPolicy(com.google.cloud.networkservices.v1.EndpointPolicy value) {
      if (endpointPolicyBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        endpointPolicy_ = value;
      } else {
        endpointPolicyBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Updated EndpointPolicy resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.EndpointPolicy endpoint_policy = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setEndpointPolicy(
        com.google.cloud.networkservices.v1.EndpointPolicy.Builder builderForValue) {
      if (endpointPolicyBuilder_ == null) {
        endpointPolicy_ = builderForValue.build();
      } else {
        endpointPolicyBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Updated EndpointPolicy resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.EndpointPolicy endpoint_policy = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeEndpointPolicy(com.google.cloud.networkservices.v1.EndpointPolicy value) {
      if (endpointPolicyBuilder_ == null) {
        // Merge only when a non-default value is already present; otherwise
        // adopt the incoming message wholesale.
        if (((bitField0_ & 0x00000002) != 0)
            && endpointPolicy_ != null
            && endpointPolicy_
                != com.google.cloud.networkservices.v1.EndpointPolicy.getDefaultInstance()) {
          getEndpointPolicyBuilder().mergeFrom(value);
        } else {
          endpointPolicy_ = value;
        }
      } else {
        endpointPolicyBuilder_.mergeFrom(value);
      }
      if (endpointPolicy_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Updated EndpointPolicy resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.EndpointPolicy endpoint_policy = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearEndpointPolicy() {
      bitField0_ = (bitField0_ & ~0x00000002);
      endpointPolicy_ = null;
      if (endpointPolicyBuilder_ != null) {
        endpointPolicyBuilder_.dispose();
        endpointPolicyBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Updated EndpointPolicy resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.EndpointPolicy endpoint_policy = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.networkservices.v1.EndpointPolicy.Builder getEndpointPolicyBuilder() {
      // Handing out the builder counts as setting the field.
      bitField0_ |= 0x00000002;
      onChanged();
      return getEndpointPolicyFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Updated EndpointPolicy resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.EndpointPolicy endpoint_policy = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.networkservices.v1.EndpointPolicyOrBuilder
        getEndpointPolicyOrBuilder() {
      if (endpointPolicyBuilder_ != null) {
        return endpointPolicyBuilder_.getMessageOrBuilder();
      } else {
        return endpointPolicy_ == null
            ? com.google.cloud.networkservices.v1.EndpointPolicy.getDefaultInstance()
            : endpointPolicy_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Updated EndpointPolicy resource.
     * </pre>
     *
     * <code>
     * .google.cloud.networkservices.v1.EndpointPolicy endpoint_policy = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.networkservices.v1.EndpointPolicy,
            com.google.cloud.networkservices.v1.EndpointPolicy.Builder,
            com.google.cloud.networkservices.v1.EndpointPolicyOrBuilder>
        getEndpointPolicyFieldBuilder() {
      if (endpointPolicyBuilder_ == null) {
        // On first access, ownership of the current value transfers from the
        // plain field into the builder, so the field is nulled out.
        endpointPolicyBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.networkservices.v1.EndpointPolicy,
                com.google.cloud.networkservices.v1.EndpointPolicy.Builder,
                com.google.cloud.networkservices.v1.EndpointPolicyOrBuilder>(
                getEndpointPolicy(), getParentForChildren(), isClean());
        endpointPolicy_ = null;
      }
      return endpointPolicyBuilder_;
    }
    // Unknown-field passthrough: delegate to the generated superclass so that
    // fields from newer schema versions survive a parse/serialize round trip.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.networkservices.v1.UpdateEndpointPolicyRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.networkservices.v1.UpdateEndpointPolicyRequest)
  // Shared immutable default instance; also serves as the prototype that the
  // protobuf runtime uses when parsing new messages of this type.
  private static final com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest();
  }

  public static com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser funnels wire data through a fresh Builder; on failure the
  // partially populated message is attached to the thrown exception so
  // callers can inspect what was read before the error.
  private static final com.google.protobuf.Parser<UpdateEndpointPolicyRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateEndpointPolicyRequest>() {
        @java.lang.Override
        public UpdateEndpointPolicyRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateEndpointPolicyRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateEndpointPolicyRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.networkservices.v1.UpdateEndpointPolicyRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/rocketmq | 38,461 | client/src/test/java/org/apache/rocketmq/client/impl/consumer/DefaultMQPushConsumerImplTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.client.impl.consumer;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.rocketmq.client.consumer.AckCallback;
import org.apache.rocketmq.client.consumer.AckResult;
import org.apache.rocketmq.client.consumer.AckStatus;
import org.apache.rocketmq.client.consumer.DefaultMQPushConsumer;
import org.apache.rocketmq.client.consumer.MessageSelector;
import org.apache.rocketmq.client.consumer.PopCallback;
import org.apache.rocketmq.client.consumer.PopResult;
import org.apache.rocketmq.client.consumer.PopStatus;
import org.apache.rocketmq.client.consumer.PullCallback;
import org.apache.rocketmq.client.consumer.PullResult;
import org.apache.rocketmq.client.consumer.PullStatus;
import org.apache.rocketmq.client.consumer.listener.ConsumeConcurrentlyStatus;
import org.apache.rocketmq.client.consumer.listener.MessageListenerConcurrently;
import org.apache.rocketmq.client.consumer.store.OffsetStore;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.hook.ConsumeMessageContext;
import org.apache.rocketmq.client.hook.ConsumeMessageHook;
import org.apache.rocketmq.client.hook.FilterMessageContext;
import org.apache.rocketmq.client.hook.FilterMessageHook;
import org.apache.rocketmq.client.impl.CommunicationMode;
import org.apache.rocketmq.client.impl.FindBrokerResult;
import org.apache.rocketmq.client.impl.MQAdminImpl;
import org.apache.rocketmq.client.impl.MQClientAPIImpl;
import org.apache.rocketmq.client.impl.factory.MQClientInstance;
import org.apache.rocketmq.client.stat.ConsumerStatsManager;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.ServiceState;
import org.apache.rocketmq.common.message.MessageConst;
import org.apache.rocketmq.common.message.MessageExt;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.remoting.exception.RemotingException;
import org.apache.rocketmq.remoting.protocol.body.ConsumeStatus;
import org.apache.rocketmq.remoting.protocol.body.ConsumerRunningInfo;
import org.apache.rocketmq.remoting.protocol.body.QueueTimeSpan;
import org.apache.rocketmq.remoting.protocol.header.AckMessageRequestHeader;
import org.apache.rocketmq.remoting.protocol.header.ChangeInvisibleTimeRequestHeader;
import org.apache.rocketmq.remoting.protocol.heartbeat.SubscriptionData;
import org.apache.rocketmq.remoting.protocol.route.BrokerData;
import org.apache.rocketmq.remoting.protocol.route.TopicRouteData;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class DefaultMQPushConsumerImplTest {
    // Collaborators replaced by Mockito mocks; init() wires the relevant ones
    // into the instance under test via reflection.
    @Mock
    private DefaultMQPushConsumer defaultMQPushConsumer;
    @Mock
    private MQClientInstance mQClientFactory;
    @Mock
    private RebalanceImpl rebalanceImpl;
    @Mock
    private PullAPIWrapper pullAPIWrapper;
    @Mock
    private PullRequest pullRequest;
    @Mock
    private PopRequest popRequest;
    @Mock
    private ProcessQueue processQueue;
    @Mock
    private PopProcessQueue popProcessQueue;
    @Mock
    private MQClientAPIImpl mqClientAPIImpl;
    @Mock
    private OffsetStore offsetStore;
    // The object under test, rebuilt fresh for every test in init().
    private DefaultMQPushConsumerImpl defaultMQPushConsumerImpl;
    // NOTE(review): ExpectedException is deprecated in JUnit 4.13; consider
    // migrating the remaining usage to Assert.assertThrows.
    @Rule
    public ExpectedException thrown = ExpectedException.none();
    // Shared fixture constants used across the test methods below.
    private final String defaultKey = "defaultKey";
    private final String defaultTopic = "defaultTopic";
    private final String defaultBroker = "defaultBroker";
    private final String defaultBrokerAddr = "127.0.0.1:10911";
    private final String defaultGroup = "defaultGroup";
    private final long defaultTimeout = 3000L;
@Test
public void checkConfigTest() throws MQClientException {
//test type
thrown.expect(MQClientException.class);
//test message
thrown.expectMessage("consumeThreadMin (10) is larger than consumeThreadMax (9)");
DefaultMQPushConsumer consumer = new DefaultMQPushConsumer("test_consumer_group");
consumer.setConsumeThreadMin(10);
consumer.setConsumeThreadMax(9);
consumer.registerMessageListener((MessageListenerConcurrently) (msgs, context) -> ConsumeConcurrentlyStatus.CONSUME_SUCCESS);
DefaultMQPushConsumerImpl defaultMQPushConsumerImpl = new DefaultMQPushConsumerImpl(consumer, null);
defaultMQPushConsumerImpl.start();
}
@Test
public void testHook() {
DefaultMQPushConsumerImpl defaultMQPushConsumerImpl = new DefaultMQPushConsumerImpl(defaultMQPushConsumer, null);
defaultMQPushConsumerImpl.registerConsumeMessageHook(new ConsumeMessageHook() {
@Override
public String hookName() {
return "consumerHook";
}
@Override
public void consumeMessageBefore(ConsumeMessageContext context) {
assertThat(context).isNotNull();
}
@Override
public void consumeMessageAfter(ConsumeMessageContext context) {
assertThat(context).isNotNull();
}
});
defaultMQPushConsumerImpl.registerFilterMessageHook(new FilterMessageHook() {
@Override
public String hookName() {
return "filterHook";
}
@Override
public void filterMessage(FilterMessageContext context) {
assertThat(context).isNotNull();
}
});
defaultMQPushConsumerImpl.executeHookBefore(new ConsumeMessageContext());
defaultMQPushConsumerImpl.executeHookAfter(new ConsumeMessageContext());
}
@Ignore
@Test
public void testPush() throws Exception {
when(defaultMQPushConsumer.getMessageListener()).thenReturn((MessageListenerConcurrently) (msgs, context) -> {
assertThat(msgs).size().isGreaterThan(0);
assertThat(context).isNotNull();
return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
});
DefaultMQPushConsumerImpl defaultMQPushConsumerImpl = new DefaultMQPushConsumerImpl(defaultMQPushConsumer, null);
try {
defaultMQPushConsumerImpl.start();
} finally {
defaultMQPushConsumerImpl.shutdown();
}
}
    /**
     * Builds the instance under test: a {@link DefaultMQPushConsumerImpl}
     * whose client factory, rebalance implementation, pull wrapper, hook list
     * and consume services are all mocks, injected via reflection because the
     * target fields are private.
     */
    @Before
    public void init() throws NoSuchFieldException, IllegalAccessException {
        MQAdminImpl mqAdminImpl = mock(MQAdminImpl.class);
        when(mQClientFactory.getMQAdminImpl()).thenReturn(mqAdminImpl);
        ConsumerStatsManager consumerStatsManager = mock(ConsumerStatsManager.class);
        ConsumeStatus consumeStatus = mock(ConsumeStatus.class);
        when(consumerStatsManager.consumeStatus(any(), any())).thenReturn(consumeStatus);
        when(mQClientFactory.getConsumerStatsManager()).thenReturn(consumerStatsManager);
        when(mQClientFactory.getPullMessageService()).thenReturn(mock(PullMessageService.class));
        when(mQClientFactory.getMQClientAPIImpl()).thenReturn(mqClientAPIImpl);
        FindBrokerResult findBrokerResult = mock(FindBrokerResult.class);
        when(findBrokerResult.getBrokerAddr()).thenReturn(defaultBrokerAddr);
        when(mQClientFactory.findBrokerAddressInSubscribe(anyString(), anyLong(), anyBoolean())).thenReturn(findBrokerResult);
        // Seed the route table with exactly one queue for defaultTopic so
        // fetchSubscribeMessageQueues has deterministic data.
        Set<MessageQueue> messageQueueSet = Collections.singleton(createMessageQueue());
        ConcurrentMap<String, Set<MessageQueue>> topicMessageQueueMap = new ConcurrentHashMap<>();
        topicMessageQueueMap.put(defaultTopic, messageQueueSet);
        when(rebalanceImpl.getTopicSubscribeInfoTable()).thenReturn(topicMessageQueueMap);
        ConcurrentMap<MessageQueue, ProcessQueue> processQueueTable = new ConcurrentHashMap<>();
        when(rebalanceImpl.getProcessQueueTable()).thenReturn(processQueueTable);
        RPCHook rpcHook = mock(RPCHook.class);
        defaultMQPushConsumerImpl = new DefaultMQPushConsumerImpl(defaultMQPushConsumer, rpcHook);
        defaultMQPushConsumerImpl.setOffsetStore(offsetStore);
        // Private fields: replaced reflectively with the mocks above.
        FieldUtils.writeDeclaredField(defaultMQPushConsumerImpl, "mQClientFactory", mQClientFactory, true);
        FieldUtils.writeDeclaredField(defaultMQPushConsumerImpl, "rebalanceImpl", rebalanceImpl, true);
        FieldUtils.writeDeclaredField(defaultMQPushConsumerImpl, "pullAPIWrapper", pullAPIWrapper, true);
        FilterMessageHook filterMessageHook = mock(FilterMessageHook.class);
        ArrayList<FilterMessageHook> filterMessageHookList = new ArrayList<>();
        filterMessageHookList.add(filterMessageHook);
        ConsumeMessageService consumeMessagePopService = mock(ConsumeMessageService.class);
        ConsumeMessageService consumeMessageService = mock(ConsumeMessageService.class);
        FieldUtils.writeDeclaredField(defaultMQPushConsumerImpl, "filterMessageHookList", filterMessageHookList, true);
        FieldUtils.writeDeclaredField(defaultMQPushConsumerImpl, "consumeMessageService", consumeMessageService, true);
        FieldUtils.writeDeclaredField(defaultMQPushConsumerImpl, "consumeMessagePopService", consumeMessagePopService, true);
        // Pre-register a subscription for defaultTopic.
        ConcurrentMap<String, SubscriptionData> subscriptionDataMap = new ConcurrentHashMap<>();
        SubscriptionData subscriptionData = new SubscriptionData();
        subscriptionData.setTopic(defaultTopic);
        subscriptionDataMap.put(defaultTopic, subscriptionData);
        when(rebalanceImpl.getSubscriptionInner()).thenReturn(subscriptionDataMap);
    }
@Test
public void testFetchSubscribeMessageQueues() throws MQClientException {
Set<MessageQueue> actual = defaultMQPushConsumerImpl.fetchSubscribeMessageQueues(defaultTopic);
assertNotNull(actual);
Assert.assertEquals(1, actual.size());
MessageQueue next = actual.iterator().next();
assertEquals(defaultTopic, next.getTopic());
assertEquals(defaultBroker, next.getBrokerName());
assertEquals(0, next.getQueueId());
}
@Test
public void testEarliestMsgStoreTime() throws MQClientException {
assertEquals(0, defaultMQPushConsumerImpl.earliestMsgStoreTime(createMessageQueue()));
}
@Test
public void testMaxOffset() throws MQClientException {
assertEquals(0, defaultMQPushConsumerImpl.maxOffset(createMessageQueue()));
}
@Test
public void testMinOffset() throws MQClientException {
assertEquals(0, defaultMQPushConsumerImpl.minOffset(createMessageQueue()));
}
@Test
public void testGetOffsetStore() {
assertEquals(offsetStore, defaultMQPushConsumerImpl.getOffsetStore());
}
    /**
     * pullMessage is invoked before start(), i.e. while the service state is
     * not RUNNING; the call must complete without throwing. (No pull should
     * be issued in this state — TODO: could be strengthened with a verify.)
     */
    @Test
    public void testPullMessageWithStateNotOk() {
        when(pullRequest.getProcessQueue()).thenReturn(processQueue);
        defaultMQPushConsumerImpl.pullMessage(pullRequest);
    }
    /**
     * With the consumer RUNNING but paused, pullMessage must return cleanly
     * without performing a pull.
     */
    @Test
    public void testPullMessageWithIsPause() {
        when(pullRequest.getProcessQueue()).thenReturn(processQueue);
        defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
        defaultMQPushConsumerImpl.setPause(true);
        defaultMQPushConsumerImpl.pullMessage(pullRequest);
    }
    /**
     * Cached message count (2) exceeds pullThresholdForQueue (1), so the
     * count-based flow-control branch should defer the pull; the call must
     * not throw.
     */
    @Test
    public void testPullMessageWithMsgCountFlowControl() {
        when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
        when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
        TreeMap<Long, MessageExt> treeMap = new TreeMap<>();
        treeMap.put(1L, new MessageExt());
        when(processQueue.getMsgTreeMap()).thenReturn(treeMap);
        when(pullRequest.getProcessQueue()).thenReturn(processQueue);
        // Threshold below the stubbed count of 2 triggers flow control.
        when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(1);
        defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
        defaultMQPushConsumerImpl.pullMessage(pullRequest);
    }
    /**
     * Cached size (3 MiB) exceeds pullThresholdSizeForQueue (1 MiB) while the
     * count threshold (3) is not exceeded, so the size-based flow-control
     * branch should defer the pull; the call must not throw.
     */
    @Test
    public void testPullMessageWithMsgSizeFlowControl() {
        when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
        when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
        TreeMap<Long, MessageExt> treeMap = new TreeMap<>();
        treeMap.put(1L, new MessageExt());
        when(processQueue.getMsgTreeMap()).thenReturn(treeMap);
        when(pullRequest.getProcessQueue()).thenReturn(processQueue);
        defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
        when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
        // Size threshold (in MiB) below the stubbed 3 MiB triggers flow control.
        when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(1);
        defaultMQPushConsumerImpl.pullMessage(pullRequest);
    }
    /**
     * Count and size thresholds pass, but the offset span of cached messages
     * (stubbed to 2) is meant to exercise the max-span flow-control branch;
     * the call must not throw.
     */
    @Test
    public void testPullMessageWithMaxSpanFlowControl() {
        when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
        when(processQueue.getMaxSpan()).thenReturn(2L);
        when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
        TreeMap<Long, MessageExt> treeMap = new TreeMap<>();
        treeMap.put(1L, new MessageExt());
        when(processQueue.getMsgTreeMap()).thenReturn(treeMap);
        when(pullRequest.getProcessQueue()).thenReturn(processQueue);
        defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
        // Thresholds high enough that count/size checks pass.
        when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
        when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(10);
        defaultMQPushConsumerImpl.pullMessage(pullRequest);
    }
    /**
     * Orderly consumption with a process queue that is not locked (mock
     * default isLocked() == false): pullMessage should take the not-locked
     * branch and return without throwing.
     */
    @Test
    public void testPullMessageWithNotLocked() {
        when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
        when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
        when(pullRequest.getProcessQueue()).thenReturn(processQueue);
        defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
        defaultMQPushConsumerImpl.setConsumeOrderly(true);
        when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
        when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(10);
        defaultMQPushConsumerImpl.pullMessage(pullRequest);
    }
    /**
     * Intended to exercise the missing-subscription branch of pullMessage.
     * NOTE(review): init() does register SubscriptionData for defaultTopic in
     * rebalanceImpl.getSubscriptionInner(), so whether the null-subscription
     * path is actually hit here is unclear — verify against pullMessage's
     * lookup logic.
     */
    @Test
    public void testPullMessageWithSubscriptionDataIsNull() {
        when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
        when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
        when(pullRequest.getMessageQueue()).thenReturn(createMessageQueue());
        when(pullRequest.getProcessQueue()).thenReturn(processQueue);
        defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
        when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
        when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(10);
        defaultMQPushConsumerImpl.pullMessage(pullRequest);
    }
    /**
     * Drives the PullCallback success path with a processed result of
     * NO_MATCHED_MSG: the stubbed pullKernelImpl invokes the callback
     * (argument index 12) synchronously, and processPullResult is stubbed to
     * report NO_MATCHED_MSG. The call must complete without throwing.
     */
    @Test
    public void testPullMessageWithNoMatchedMsg() throws MQBrokerException, RemotingException, InterruptedException, MQClientException {
        when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
        when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
        when(pullRequest.getMessageQueue()).thenReturn(createMessageQueue());
        when(pullRequest.getProcessQueue()).thenReturn(processQueue);
        defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
        when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
        when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(10);
        PullResult pullResultMock = mock(PullResult.class);
        when(pullAPIWrapper.processPullResult(any(MessageQueue.class), any(PullResult.class), any(SubscriptionData.class))).thenReturn(pullResultMock);
        when(pullResultMock.getPullStatus()).thenReturn(PullStatus.NO_MATCHED_MSG);
        // Simulate an immediate successful broker response: the 13th argument
        // of pullKernelImpl is the PullCallback.
        doAnswer(invocation -> {
            PullCallback callback = invocation.getArgument(12);
            PullResult pullResult = mock(PullResult.class);
            callback.onSuccess(pullResult);
            return null;
        }).when(pullAPIWrapper).pullKernelImpl(
            any(MessageQueue.class),
            any(),
            any(),
            anyLong(),
            anyLong(),
            anyInt(),
            anyInt(),
            anyInt(),
            anyLong(),
            anyLong(),
            anyLong(),
            any(CommunicationMode.class),
            any(PullCallback.class));
        defaultMQPushConsumerImpl.pullMessage(pullRequest);
    }
    /**
     * Same shape as {@code testPullMessageWithNoMatchedMsg}, but the
     * processed pull result reports OFFSET_ILLEGAL, exercising the
     * offset-correction branch of the PullCallback. The call must complete
     * without throwing.
     */
    @Test
    public void testPullMessageWithOffsetIllegal() throws MQBrokerException, RemotingException, InterruptedException, MQClientException {
        when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
        when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
        when(pullRequest.getMessageQueue()).thenReturn(createMessageQueue());
        when(pullRequest.getProcessQueue()).thenReturn(processQueue);
        defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
        when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
        when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(10);
        PullResult pullResultMock = mock(PullResult.class);
        when(pullAPIWrapper.processPullResult(any(MessageQueue.class), any(PullResult.class), any(SubscriptionData.class))).thenReturn(pullResultMock);
        when(pullResultMock.getPullStatus()).thenReturn(PullStatus.OFFSET_ILLEGAL);
        // Simulate an immediate successful broker response: the 13th argument
        // of pullKernelImpl is the PullCallback.
        doAnswer(invocation -> {
            PullCallback callback = invocation.getArgument(12);
            PullResult pullResult = mock(PullResult.class);
            callback.onSuccess(pullResult);
            return null;
        }).when(pullAPIWrapper).pullKernelImpl(
            any(MessageQueue.class),
            any(),
            any(),
            anyLong(),
            anyLong(),
            anyInt(),
            anyInt(),
            anyInt(),
            anyLong(),
            anyLong(),
            anyLong(),
            any(CommunicationMode.class),
            any(PullCallback.class));
        defaultMQPushConsumerImpl.pullMessage(pullRequest);
    }
@Test
// Failure path: pullKernelImpl's callback (argument index 12) is completed
// with onException.  pullMessage() must absorb the error (re-scheduling the
// request internally) rather than letting the exception propagate.
public void testPullMessageWithException() throws MQBrokerException, RemotingException, InterruptedException, MQClientException {
// Stay below both flow-control thresholds so the pull proceeds.
when(processQueue.getMsgCount()).thenReturn(new AtomicLong(2));
when(processQueue.getMsgSize()).thenReturn(new AtomicLong(3 * 1024 * 1024));
when(pullRequest.getMessageQueue()).thenReturn(createMessageQueue());
when(pullRequest.getProcessQueue()).thenReturn(processQueue);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
when(defaultMQPushConsumer.getPullThresholdForQueue()).thenReturn(3);
when(defaultMQPushConsumer.getPullThresholdSizeForQueue()).thenReturn(10);
doAnswer(invocation -> {
PullCallback callback = invocation.getArgument(12);
callback.onException(new RuntimeException("exception"));
return null;
}).when(pullAPIWrapper).pullKernelImpl(
any(MessageQueue.class),
any(),
any(),
anyLong(),
anyLong(),
anyInt(),
anyInt(),
anyInt(),
anyLong(),
anyLong(),
anyLong(),
any(CommunicationMode.class),
any(PullCallback.class));
defaultMQPushConsumerImpl.pullMessage(pullRequest);
}
@Test
// Happy-path pop: popAsync is stubbed to complete its PopCallback (argument
// index 5) with a FOUND result carrying one message; popMessage() should
// dispatch it without throwing.  Pass criterion: no exception escapes.
public void testPopMessageWithFound() throws RemotingException, InterruptedException, MQClientException {
when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
when(popRequest.getMessageQueue()).thenReturn(createMessageQueue());
when(popRequest.getConsumerGroup()).thenReturn(defaultGroup);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
// Register a catch-all ("*") subscription so the topic lookup succeeds.
ConcurrentMap<String, SubscriptionData> subscriptionDataMap = new ConcurrentHashMap<>();
SubscriptionData subscriptionData = new SubscriptionData();
subscriptionData.setTagsSet(Collections.singleton("*"));
subscriptionDataMap.put(defaultTopic, subscriptionData);
when(rebalanceImpl.getSubscriptionInner()).thenReturn(subscriptionDataMap);
doAnswer(invocation -> {
// 6th parameter of popAsync is the PopCallback; complete it inline.
PopCallback callback = invocation.getArgument(5);
PopResult popResult = mock(PopResult.class);
when(popResult.getPopStatus()).thenReturn(PopStatus.FOUND);
when(popResult.getMsgFoundList()).thenReturn(Collections.singletonList(createMessageExt()));
callback.onSuccess(popResult);
return null;
}).when(pullAPIWrapper).popAsync(
any(MessageQueue.class),
anyLong(),
anyInt(),
any(),
anyLong(),
any(PopCallback.class),
anyBoolean(),
anyInt(),
anyBoolean(),
any(),
any());
defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
// Failure path for pop: the PopCallback (popAsync argument index 5) is
// completed with onException; popMessage() must absorb the error rather than
// letting the exception propagate.
public void testPopMessageWithException() throws RemotingException, InterruptedException, MQClientException {
when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
when(popRequest.getMessageQueue()).thenReturn(createMessageQueue());
when(popRequest.getConsumerGroup()).thenReturn(defaultGroup);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
// Register a catch-all ("*") subscription so the topic lookup succeeds.
ConcurrentMap<String, SubscriptionData> subscriptionDataMap = new ConcurrentHashMap<>();
SubscriptionData subscriptionData = new SubscriptionData();
subscriptionData.setTagsSet(Collections.singleton("*"));
subscriptionDataMap.put(defaultTopic, subscriptionData);
when(rebalanceImpl.getSubscriptionInner()).thenReturn(subscriptionDataMap);
doAnswer(invocation -> {
PopCallback callback = invocation.getArgument(5);
callback.onException(new RuntimeException("exception"));
return null;
}).when(pullAPIWrapper).popAsync(
any(MessageQueue.class),
anyLong(),
anyInt(),
any(),
anyLong(),
any(PopCallback.class),
anyBoolean(),
anyInt(),
anyBoolean(),
any(),
any());
defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
// Pop path when the broker has no new messages: the PopCallback (popAsync
// argument index 5) is completed with a NO_NEW_MSG result and popMessage()
// must handle the empty round without throwing.
public void testPopMessageWithNoNewMsg() throws RemotingException, InterruptedException, MQClientException {
when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
when(popRequest.getMessageQueue()).thenReturn(createMessageQueue());
when(popRequest.getConsumerGroup()).thenReturn(defaultGroup);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
// Register a catch-all ("*") subscription so the topic lookup succeeds.
ConcurrentMap<String, SubscriptionData> subscriptionDataMap = new ConcurrentHashMap<>();
SubscriptionData subscriptionData = new SubscriptionData();
subscriptionData.setTagsSet(Collections.singleton("*"));
subscriptionDataMap.put(defaultTopic, subscriptionData);
when(rebalanceImpl.getSubscriptionInner()).thenReturn(subscriptionDataMap);
doAnswer(invocation -> {
PopCallback callback = invocation.getArgument(5);
PopResult popResult = mock(PopResult.class);
when(popResult.getPopStatus()).thenReturn(PopStatus.NO_NEW_MSG);
callback.onSuccess(popResult);
return null;
}).when(pullAPIWrapper).popAsync(
any(MessageQueue.class),
anyLong(),
anyInt(),
any(),
anyLong(),
any(PopCallback.class),
anyBoolean(),
anyInt(),
anyBoolean(),
any(),
any());
defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
// Pop path when the broker reports POLLING_FULL (its long-polling queue is
// saturated): the PopCallback (popAsync argument index 5) is completed
// synchronously with that status, and popMessage() must absorb it without
// throwing.  Pass criterion: no exception escapes.
public void testPopMessageWithPollingFull() throws RemotingException, InterruptedException, MQClientException {
    when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
    when(popRequest.getMessageQueue()).thenReturn(createMessageQueue());
    when(popRequest.getConsumerGroup()).thenReturn(defaultGroup);
    defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
    // Register a catch-all ("*") subscription so the topic lookup succeeds.
    ConcurrentMap<String, SubscriptionData> subscriptionDataMap = new ConcurrentHashMap<>();
    SubscriptionData subscriptionData = new SubscriptionData();
    subscriptionData.setTagsSet(Collections.singleton("*"));
    subscriptionDataMap.put(defaultTopic, subscriptionData);
    when(rebalanceImpl.getSubscriptionInner()).thenReturn(subscriptionDataMap);
    // Argument list reformatted to match the sibling pop tests (the original
    // split "any(" and "MessageQueue.class)" across lines inconsistently).
    doAnswer(invocation -> {
        PopCallback callback = invocation.getArgument(5);
        PopResult popResult = mock(PopResult.class);
        when(popResult.getPopStatus()).thenReturn(PopStatus.POLLING_FULL);
        callback.onSuccess(popResult);
        return null;
    }).when(pullAPIWrapper).popAsync(
        any(MessageQueue.class),
        anyLong(),
        anyInt(),
        any(),
        anyLong(),
        any(PopCallback.class),
        anyBoolean(),
        anyInt(),
        anyBoolean(),
        any(),
        any());
    defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
public void testPopMessageWithStateNotOk() {
    // The service state is never set to RUNNING here, so popMessage() must
    // bail out early instead of issuing a pop request.
    when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);

    defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
public void testPopMessageWithIsPause() {
    // Even a RUNNING consumer must skip popping while paused.
    defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
    defaultMQPushConsumerImpl.setPause(true);
    when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);

    defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
// Flow control on un-acked pop messages: 2 in-flight acks exceed the
// popThresholdForQueue of 1, so popMessage() should defer the request.
// (The "WaiAck" spelling follows the production accessor
// PopProcessQueue.getWaiAckMsgCount(); it cannot be corrected here.)
public void testPopMessageWithWaiAckMsgCountFlowControl() {
when(popProcessQueue.getWaiAckMsgCount()).thenReturn(2);
when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
when(defaultMQPushConsumer.getPopThresholdForQueue()).thenReturn(1);
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
// No assertion: the test passes as long as no exception escapes.
defaultMQPushConsumerImpl.popMessage(popRequest);
}
@Test
// Pop with no SubscriptionData registered for the topic: popMessage() still
// issues popAsync.  The literal expectations (60000L, 15000L, the 0/true/
// false flags) presumably mirror the client's default pop parameters --
// confirm against DefaultMQPushConsumerImpl if this verify starts failing.
public void testPopMessageWithSubscriptionDataIsNull() throws RemotingException, InterruptedException, MQClientException {
// 2 un-acked messages stay below the threshold of 3, so no flow control.
when(popProcessQueue.getWaiAckMsgCount()).thenReturn(2);
when(popRequest.getPopProcessQueue()).thenReturn(popProcessQueue);
when(popRequest.getMessageQueue()).thenReturn(createMessageQueue());
defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);
when(defaultMQPushConsumer.getPopThresholdForQueue()).thenReturn(3);
defaultMQPushConsumerImpl.popMessage(popRequest);
verify(pullAPIWrapper).popAsync(any(MessageQueue.class),
eq(60000L),
eq(0),
any(),
eq(15000L),
any(PopCallback.class),
eq(true),
eq(0),
eq(false),
any(),
any());
}
@Test
public void testQueryMessage() throws InterruptedException, MQClientException {
    // With the client layer mocked there is no broker to answer the query,
    // so a key-based lookup over [0, 1] yields null.
    final long begin = 0;
    final long end = 1;
    assertNull(defaultMQPushConsumerImpl.queryMessage(defaultTopic, defaultKey, 1, begin, end));
}
@Test
public void testQueryMessageByUniqKey() throws InterruptedException, MQClientException {
    // No broker behind the mocks: lookup by unique key resolves to null.
    MessageExt found = defaultMQPushConsumerImpl.queryMessageByUniqKey(defaultTopic, defaultKey);
    assertNull(found);
}
@Test
// sendMessageBack() resolves the broker address via the publish route and
// delegates to consumerSendMessageBack with delay level 1.  The 5000L and 0
// expectations presumably reflect the default send-back timeout and max
// reconsume times -- confirm against the fixture's consumer configuration.
public void testSendMessageBack() throws InterruptedException, MQClientException, MQBrokerException, RemotingException {
when(mQClientFactory.findBrokerAddressInPublish(anyString())).thenReturn(defaultBrokerAddr);
defaultMQPushConsumerImpl.sendMessageBack(createMessageExt(), 1, createMessageQueue());
verify(mqClientAPIImpl).consumerSendMessageBack(
eq(defaultBrokerAddr),
eq(defaultBroker),
any(MessageExt.class),
any(),
eq(1),
eq(5000L),
eq(0));
}
@Test
// ackAsync() happy path: ackMessageAsync is stubbed to complete its
// AckCallback (argument index 2) with AckStatus.OK synchronously.  The call
// is then verified against the default broker address with a 3000L timeout
// -- presumably the client's default ack timeout; confirm if it drifts.
public void testAckAsync() throws MQBrokerException, RemotingException, InterruptedException {
doAnswer(invocation -> {
AckCallback callback = invocation.getArgument(2);
AckResult result = mock(AckResult.class);
when(result.getStatus()).thenReturn(AckStatus.OK);
callback.onSuccess(result);
return null;
}).when(mqClientAPIImpl).ackMessageAsync(any(),
anyLong(),
any(AckCallback.class),
any(AckMessageRequestHeader.class));
// The broker name/address are recovered from the POP_CK token that
// createMessageExt() embeds in the message properties.
defaultMQPushConsumerImpl.ackAsync(createMessageExt(), defaultGroup);
verify(mqClientAPIImpl).ackMessageAsync(eq(defaultBrokerAddr),
eq(3000L),
any(AckCallback.class),
any(AckMessageRequestHeader.class));
}
@Test
// changePopInvisibleTimeAsync() parses the POP_CK extra-info token built by
// createMessageExt() to recover the broker name, then issues
// changeInvisibleTimeAsync against that broker's resolved address with the
// caller-supplied timeout.
public void testChangePopInvisibleTimeAsync() throws MQBrokerException, RemotingException, InterruptedException, MQClientException {
AckCallback callback = mock(AckCallback.class);
String extraInfo = createMessageExt().getProperty(MessageConst.PROPERTY_POP_CK);
defaultMQPushConsumerImpl.changePopInvisibleTimeAsync(defaultTopic, defaultGroup, extraInfo, defaultTimeout, callback);
verify(mqClientAPIImpl).changeInvisibleTimeAsync(eq(defaultBroker),
eq(defaultBrokerAddr),
any(ChangeInvisibleTimeRequestHeader.class),
eq(defaultTimeout),
any(AckCallback.class));
}
@Test
public void testShutdown() {
    // Start from RUNNING so shutdown() performs the full state transition.
    defaultMQPushConsumerImpl.setServiceState(ServiceState.RUNNING);

    defaultMQPushConsumerImpl.shutdown();

    ServiceState state = defaultMQPushConsumerImpl.getServiceState();
    assertEquals(ServiceState.SHUTDOWN_ALREADY, state);
}
@Test
public void testSubscribe() throws MQClientException {
    // A class-filter subscription should register exactly one entry in the
    // rebalance implementation's subscription table.
    defaultMQPushConsumerImpl.subscribe(defaultTopic, "fullClassname", "filterClassSource");

    RebalanceImpl rebalance = defaultMQPushConsumerImpl.getRebalanceImpl();
    assertEquals(1, rebalance.getSubscriptionInner().size());
}
@Test
public void testSubscribeByMessageSelector() throws MQClientException {
    // Subscribing through a MessageSelector registers one subscription entry.
    defaultMQPushConsumerImpl.subscribe(defaultTopic, mock(MessageSelector.class));

    RebalanceImpl rebalance = defaultMQPushConsumerImpl.getRebalanceImpl();
    assertEquals(1, rebalance.getSubscriptionInner().size());
}
@Test
public void testSuspend() {
    // Suspending the consumer must flip its pause flag.
    defaultMQPushConsumerImpl.suspend();

    boolean paused = defaultMQPushConsumerImpl.isPause();
    assertTrue(paused);
}
@Test
public void testViewMessage() throws InterruptedException, MQClientException, MQBrokerException, RemotingException {
    // Viewing a fabricated message id against the mocked client yields null.
    String msgId = createMessageExt().getMsgId();
    assertNull(defaultMQPushConsumerImpl.viewMessage(defaultTopic, msgId));
}
@Test
// resetOffsetByTimeStamp() walks every subscribed topic and asks the client
// factory to reset offsets; with a single topic registered, exactly one
// resetOffset invocation is expected.
public void testResetOffsetByTimeStamp() throws MQClientException {
ConcurrentMap<String, SubscriptionData> subscriptionDataMap = new ConcurrentHashMap<>();
subscriptionDataMap.put(defaultTopic, new SubscriptionData());
when(rebalanceImpl.getSubscriptionInner()).thenReturn(subscriptionDataMap);
defaultMQPushConsumerImpl.resetOffsetByTimeStamp(System.currentTimeMillis());
verify(mQClientFactory).resetOffset(eq(defaultTopic), any(), any());
}
@Test
public void testSearchOffset() throws MQClientException {
    // With the client mocked, searching any timestamp resolves to offset 0.
    long offset = defaultMQPushConsumerImpl.searchOffset(createMessageQueue(), System.currentTimeMillis());
    assertEquals(0, offset);
}
@Test
// queryConsumeTimeSpan() fetches the topic route from the (mocked) name
// server; the single stubbed broker contributes no spans, so the aggregated
// result must be an empty -- but non-null -- list.
public void testQueryConsumeTimeSpan() throws InterruptedException, MQClientException, MQBrokerException, RemotingException {
TopicRouteData topicRouteData = new TopicRouteData();
topicRouteData.getBrokerDatas().add(createBrokerData());
when(mqClientAPIImpl.getTopicRouteInfoFromNameServer(any(), anyLong())).thenReturn(topicRouteData);
List<QueueTimeSpan> actual = defaultMQPushConsumerImpl.queryConsumeTimeSpan(defaultTopic);
assertNotNull(actual);
assertEquals(0, actual.size());
}
@Test
// A message whose topic is the pop-retry topic (%RETRY%<group>_<topic>) must
// have its original topic restored before delivery to the listener.
// The original test also built a TopicRouteData fixture that was never used;
// that dead setup has been removed.
public void testTryResetPopRetryTopic() {
    MessageExt messageExt = createMessageExt();
    messageExt.setTopic(MixAll.RETRY_GROUP_TOPIC_PREFIX + defaultGroup + "_" + defaultTopic);
    List<MessageExt> msgs = new ArrayList<>();
    msgs.add(messageExt);

    defaultMQPushConsumerImpl.tryResetPopRetryTopic(msgs, defaultGroup);

    assertEquals(defaultTopic, msgs.get(0).getTopic());
}
@Test
public void testGetPopDelayLevel() {
    // Default pop retry backoff ladder, in seconds.
    int[] expected = {10, 30, 60, 120, 180, 240, 300, 360, 420, 480, 540, 600, 1200, 1800, 3600, 7200};
    assertArrayEquals(expected, defaultMQPushConsumerImpl.getPopDelayLevel());
}
@Test
public void testGetMessageQueueListener() {
    // The fixture registers no listener, so the getter must return null.
    assertNull(defaultMQPushConsumerImpl.getMessageQueueListener());
}
@Test
// consumerRunningInfo() snapshots the subscriptions plus the pull and pop
// process-queue tables; with one entry seeded into each table the snapshot
// should report exactly one subscription, one MQ row, one pop row, and one
// per-topic status row.
public void testConsumerRunningInfo() {
ConcurrentMap<MessageQueue, ProcessQueue> processQueueMap = new ConcurrentHashMap<>();
ConcurrentMap<MessageQueue, PopProcessQueue> popProcessQueueMap = new ConcurrentHashMap<>();
processQueueMap.put(createMessageQueue(), new ProcessQueue());
popProcessQueueMap.put(createMessageQueue(), new PopProcessQueue());
when(rebalanceImpl.getProcessQueueTable()).thenReturn(processQueueMap);
when(rebalanceImpl.getPopProcessQueueTable()).thenReturn(popProcessQueueMap);
ConsumerRunningInfo actual = defaultMQPushConsumerImpl.consumerRunningInfo();
assertNotNull(actual);
assertEquals(1, actual.getSubscriptionSet().size());
assertEquals(defaultTopic, actual.getSubscriptionSet().iterator().next().getTopic());
assertEquals(1, actual.getMqTable().size());
assertEquals(1, actual.getMqPopTable().size());
assertEquals(1, actual.getStatusTable().size());
}
// Builds a single-master BrokerData entry (master id -> default broker
// address) for route-data driven tests.
private BrokerData createBrokerData() {
    HashMap<Long, String> addressesByBrokerId = new HashMap<>();
    addressesByBrokerId.put(MixAll.MASTER_ID, defaultBrokerAddr);

    BrokerData brokerData = new BrokerData();
    brokerData.setBrokerName(defaultBroker);
    brokerData.setBrokerAddrs(addressesByBrokerId);
    return brokerData;
}
// Builds queue 0 on the default test broker/topic.  Uses the
// (topic, brokerName, queueId) constructor instead of setter calls;
// the resulting queue is identical.
private MessageQueue createMessageQueue() {
    return new MessageQueue(defaultTopic, defaultBroker, 0);
}
// Builds a fully-populated MessageExt for the tests: body/topic/broker, a
// user property, producer-group and unique-key properties, and a synthetic
// POP_CK ("pop check-in") token so the pop/ack tests can parse broker
// routing information out of the message.
private MessageExt createMessageExt() {
MessageExt result = new MessageExt();
result.setBody("body".getBytes(StandardCharsets.UTF_8));
result.setTopic(defaultTopic);
result.setBrokerName(defaultBroker);
result.putUserProperty("key", "value");
result.getProperties().put(MessageConst.PROPERTY_PRODUCER_GROUP, defaultGroup);
result.getProperties().put(MessageConst.PROPERTY_UNIQ_CLIENT_MESSAGE_ID_KEYIDX, "TX1");
long curTime = System.currentTimeMillis();
// Born one second in the past so elapsed-time calculations stay positive.
result.setBornTimestamp(curTime - 1000);
// Space-separated POP_CK fields: five timestamp-like values, the broker
// name, then three numeric ids/offsets.  Field semantics presumably follow
// ExtraInfoUtil's layout -- confirm there before relying on positions.
String popProps = String.format("%d %d %d %d %d %s %d %d %d", curTime, curTime, curTime, curTime, curTime, defaultBroker, 1, 0L, 1L);
result.getProperties().put(MessageConst.PROPERTY_POP_CK, popProps);
result.setKeys("keys");
result.setTags("*");
// Loopback born/store hosts so host-dependent code paths have addresses.
SocketAddress bornHost = new InetSocketAddress("127.0.0.1", 12911);
SocketAddress storeHost = new InetSocketAddress("127.0.0.1", 10911);
result.setBornHost(bornHost);
result.setStoreHost(storeHost);
return result;
}
}
|
googleapis/google-cloud-java | 38,168 | java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/UpdateDatabaseRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.biglake.v1alpha1;
/**
*
*
* <pre>
* Request message for the UpdateDatabase method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest}
*/
public final class UpdateDatabaseRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest)
UpdateDatabaseRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateDatabaseRequest.newBuilder() to construct.
private UpdateDatabaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateDatabaseRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateDatabaseRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.class,
com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.Builder.class);
}
private int bitField0_;
public static final int DATABASE_FIELD_NUMBER = 1;
private com.google.cloud.bigquery.biglake.v1alpha1.Database database_;
/**
*
*
* <pre>
* Required. The database to update.
*
* The database's `name` field is used to identify the database to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the database field is set.
*/
@java.lang.Override
public boolean hasDatabase() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The database to update.
*
* The database's `name` field is used to identify the database to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The database.
*/
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1alpha1.Database getDatabase() {
return database_ == null
? com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance()
: database_;
}
/**
*
*
* <pre>
* Required. The database to update.
*
* The database's `name` field is used to identify the database to update.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder getDatabaseOrBuilder() {
return database_ == null
? com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance()
: database_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* The list of fields to update.
*
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* If not set, defaults to all of the fields that are allowed to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getDatabase());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getDatabase());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest)) {
return super.equals(obj);
}
com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest other =
(com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest) obj;
if (hasDatabase() != other.hasDatabase()) return false;
if (hasDatabase()) {
if (!getDatabase().equals(other.getDatabase())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasDatabase()) {
hash = (37 * hash) + DATABASE_FIELD_NUMBER;
hash = (53 * hash) + getDatabase().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for the UpdateDatabase method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest)
com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.class,
com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.Builder.class);
}
// Construct using com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getDatabaseFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
database_ = null;
if (databaseBuilder_ != null) {
databaseBuilder_.dispose();
databaseBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1alpha1_UpdateDatabaseRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest
getDefaultInstanceForType() {
return com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest build() {
com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest buildPartial() {
com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest result =
new com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.database_ = databaseBuilder_ == null ? database_ : databaseBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
    // The overrides below delegate straight to GeneratedMessageV3.Builder;
    // they are emitted so the generated builder keeps a stable public surface.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dynamic-dispatch merge: narrows to the typed overload when the other
    // message is an UpdateDatabaseRequest, otherwise falls back to the
    // reflective descriptor-based merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest) {
        return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-by-field merge: set singular message fields are merged
    // recursively; unknown fields from the other message are appended.
    public Builder mergeFrom(
        com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest other) {
      if (other
          == com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest.getDefaultInstance())
        return this;
      if (other.hasDatabase()) {
        mergeDatabase(other.getDatabase());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    // proto3 message with no required fields: a builder is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses wire-format data directly into this builder.
    // Tag 10 = field 1 (database), tag 18 = field 2 (update_mask); any other
    // tag is preserved as an unknown field. Tag 0 means end of stream.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getDatabaseFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Mark the builder dirty even on failure so partial state is visible.
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x1 = database, 0x2 = update_mask.
    private int bitField0_;

    // Singular message field `database` (field number 1). Either the raw
    // message (database_) or a nested field builder (databaseBuilder_) holds
    // the current value; when the builder is non-null it is authoritative.
    private com.google.cloud.bigquery.biglake.v1alpha1.Database database_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.bigquery.biglake.v1alpha1.Database,
            com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder,
            com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder>
        databaseBuilder_;

    /**
     *
     *
     * <pre>
     * Required. The database to update.
     *
     * The database's `name` field is used to identify the database to update.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the database field is set.
     */
    public boolean hasDatabase() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. The database to update.
     *
     * The database's `name` field is used to identify the database to update.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The database.
     */
    public com.google.cloud.bigquery.biglake.v1alpha1.Database getDatabase() {
      if (databaseBuilder_ == null) {
        // Never returns null: an unset field reads as the default instance.
        return database_ == null
            ? com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance()
            : database_;
      } else {
        return databaseBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The database to update.
     *
     * The database's `name` field is used to identify the database to update.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setDatabase(com.google.cloud.bigquery.biglake.v1alpha1.Database value) {
      if (databaseBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        database_ = value;
      } else {
        databaseBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The database to update.
     *
     * The database's `name` field is used to identify the database to update.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setDatabase(
        com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder builderForValue) {
      if (databaseBuilder_ == null) {
        database_ = builderForValue.build();
      } else {
        databaseBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The database to update.
     *
     * The database's `name` field is used to identify the database to update.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeDatabase(com.google.cloud.bigquery.biglake.v1alpha1.Database value) {
      if (databaseBuilder_ == null) {
        // Merge field-by-field only when a non-default value is already set;
        // otherwise simply adopt the incoming message.
        if (((bitField0_ & 0x00000001) != 0)
            && database_ != null
            && database_
                != com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance()) {
          getDatabaseBuilder().mergeFrom(value);
        } else {
          database_ = value;
        }
      } else {
        databaseBuilder_.mergeFrom(value);
      }
      if (database_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The database to update.
     *
     * The database's `name` field is used to identify the database to update.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearDatabase() {
      bitField0_ = (bitField0_ & ~0x00000001);
      database_ = null;
      if (databaseBuilder_ != null) {
        databaseBuilder_.dispose();
        databaseBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The database to update.
     *
     * The database's `name` field is used to identify the database to update.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder getDatabaseBuilder() {
      // Handing out a mutable sub-builder counts as setting the field.
      bitField0_ |= 0x00000001;
      onChanged();
      return getDatabaseFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. The database to update.
     *
     * The database's `name` field is used to identify the database to update.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder getDatabaseOrBuilder() {
      if (databaseBuilder_ != null) {
        return databaseBuilder_.getMessageOrBuilder();
      } else {
        return database_ == null
            ? com.google.cloud.bigquery.biglake.v1alpha1.Database.getDefaultInstance()
            : database_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The database to update.
     *
     * The database's `name` field is used to identify the database to update.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.biglake.v1alpha1.Database database = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.bigquery.biglake.v1alpha1.Database,
            com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder,
            com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder>
        getDatabaseFieldBuilder() {
      // Lazily promotes the plain message to a field builder; from then on the
      // builder owns the value and database_ is cleared.
      if (databaseBuilder_ == null) {
        databaseBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.bigquery.biglake.v1alpha1.Database,
                com.google.cloud.bigquery.biglake.v1alpha1.Database.Builder,
                com.google.cloud.bigquery.biglake.v1alpha1.DatabaseOrBuilder>(
                getDatabase(), getParentForChildren(), isClean());
        database_ = null;
      }
      return databaseBuilder_;
    }
    // Singular message field `update_mask` (field number 2). Same raw-value /
    // field-builder ownership scheme as the `database` field above.
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;

    /**
     *
     *
     * <pre>
     * The list of fields to update.
     *
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * If not set, defaults to all of the fields that are allowed to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * The list of fields to update.
     *
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * If not set, defaults to all of the fields that are allowed to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        // Never returns null: an unset field reads as the default instance.
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * The list of fields to update.
     *
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * If not set, defaults to all of the fields that are allowed to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of fields to update.
     *
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * If not set, defaults to all of the fields that are allowed to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of fields to update.
     *
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * If not set, defaults to all of the fields that are allowed to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Merge only when a non-default value is already set; otherwise adopt
        // the incoming message wholesale.
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of fields to update.
     *
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * If not set, defaults to all of the fields that are allowed to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of fields to update.
     *
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * If not set, defaults to all of the fields that are allowed to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Handing out a mutable sub-builder counts as setting the field.
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * The list of fields to update.
     *
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * If not set, defaults to all of the fields that are allowed to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }

    /**
     *
     *
     * <pre>
     * The list of fields to update.
     *
     * For the `FieldMask` definition, see
     * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
     * If not set, defaults to all of the fields that are allowed to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily promotes the plain message to a field builder; from then on the
      // builder owns the value and updateMask_ is cleared.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Unknown-field handling delegates to the superclass; kept for the stable
    // generated-code surface.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest)
}
  // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest)
  // Shared singleton default instance, created eagerly in the static
  // initializer; all unset message-typed fields resolve to it.
  private static final com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest();
  }

  public static com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that funnels wire data through the builder's mergeFrom and attaches
  // the partially-built message to any parse exception for diagnostics.
  private static final com.google.protobuf.Parser<UpdateDatabaseRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateDatabaseRequest>() {
        @java.lang.Override
        public UpdateDatabaseRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a protobuf parse error.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateDatabaseRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateDatabaseRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1alpha1.UpdateDatabaseRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== Begin concatenated file: googleapis/google-cloud-java,
// java-websecurityscanner/proto-google-cloud-websecurityscanner-v1beta/src/main/java/com/google/cloud/websecurityscanner/v1beta/ListCrawledUrlsResponse.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/websecurityscanner/v1beta/web_security_scanner.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.websecurityscanner.v1beta;
/**
*
*
* <pre>
* Response for the `ListCrawledUrls` method.
* </pre>
*
* Protobuf type {@code google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse}
*/
public final class ListCrawledUrlsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse)
ListCrawledUrlsResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListCrawledUrlsResponse.newBuilder() to construct.
  private ListCrawledUrlsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used only for the default instance; initializes
  // repeated/string fields to their empty defaults.
  private ListCrawledUrlsResponse() {
    crawledUrls_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  // Hook used by the runtime to create instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListCrawledUrlsResponse();
  }
  // Static descriptor accessor for this message type.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
        .internal_static_google_cloud_websecurityscanner_v1beta_ListCrawledUrlsResponse_descriptor;
  }

  // Maps descriptor fields to the generated accessors for reflection support.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
        .internal_static_google_cloud_websecurityscanner_v1beta_ListCrawledUrlsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse.class,
            com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse.Builder.class);
  }
  public static final int CRAWLED_URLS_FIELD_NUMBER = 1;

  // Repeated message field `crawled_urls` (field number 1); immutable once the
  // message is built.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.websecurityscanner.v1beta.CrawledUrl> crawledUrls_;

  /**
   *
   *
   * <pre>
   * The list of CrawledUrls returned.
   * </pre>
   *
   * <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.websecurityscanner.v1beta.CrawledUrl>
      getCrawledUrlsList() {
    return crawledUrls_;
  }

  /**
   *
   *
   * <pre>
   * The list of CrawledUrls returned.
   * </pre>
   *
   * <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.websecurityscanner.v1beta.CrawledUrlOrBuilder>
      getCrawledUrlsOrBuilderList() {
    return crawledUrls_;
  }

  /**
   *
   *
   * <pre>
   * The list of CrawledUrls returned.
   * </pre>
   *
   * <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
   */
  @java.lang.Override
  public int getCrawledUrlsCount() {
    return crawledUrls_.size();
  }

  /**
   *
   *
   * <pre>
   * The list of CrawledUrls returned.
   * </pre>
   *
   * <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.websecurityscanner.v1beta.CrawledUrl getCrawledUrls(int index) {
    return crawledUrls_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The list of CrawledUrls returned.
   * </pre>
   *
   * <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.websecurityscanner.v1beta.CrawledUrlOrBuilder getCrawledUrlsOrBuilder(
      int index) {
    return crawledUrls_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // String field `next_page_token` (field number 2). Stored as either a
  // String or a ByteString; reads memoize the decoded form (standard protobuf
  // lazy UTF-8 caching), hence the volatile Object.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no
   * more results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String for subsequent reads.
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no
   * more results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString for subsequent reads.
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = unknown, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  // proto3: no required fields, so this always memoizes and returns true.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes set fields in field-number order; empty strings and empty
  // repeated fields are omitted from the wire per proto3 semantics.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < crawledUrls_.size(); i++) {
      output.writeMessage(1, crawledUrls_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes and memoizes the serialized byte size (safe because the message
  // is immutable once built).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < crawledUrls_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, crawledUrls_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse other =
        (com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse) obj;
    if (!getCrawledUrlsList().equals(other.getCrawledUrlsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash consistent with equals(); memoized since the message is immutable.
  // (memoizedHashCode == 0 means "not yet computed".)
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getCrawledUrlsCount() > 0) {
      hash = (37 * hash) + CRAWLED_URLS_FIELD_NUMBER;
      hash = (53 * hash) + getCrawledUrlsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads; all funnel into PARSER (and, for
  // stream variants, the GeneratedMessageV3 IOException-translating helpers).
  public static com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Creates a builder pre-populated from an existing message.
  public static Builder newBuilder(
      com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // Avoids a redundant merge when converting the default instance itself.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response for the `ListCrawledUrls` method.
* </pre>
*
* Protobuf type {@code google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse)
com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponseOrBuilder {
    // Static descriptor accessor for the builder's message type.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
          .internal_static_google_cloud_websecurityscanner_v1beta_ListCrawledUrlsResponse_descriptor;
    }

    // Maps descriptor fields to the generated accessors for reflection support.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
          .internal_static_google_cloud_websecurityscanner_v1beta_ListCrawledUrlsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse.class,
              com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse.Builder.class);
    }

    // Construct using
    // com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse.newBuilder()
    private Builder() {}

    // Nested-builder constructor used by parent builders for change tracking.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (crawledUrlsBuilder_ == null) {
        crawledUrls_ = java.util.Collections.emptyList();
      } else {
        crawledUrls_ = null;
        crawledUrlsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
          .internal_static_google_cloud_websecurityscanner_v1beta_ListCrawledUrlsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse
        getDefaultInstanceForType() {
      return com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse
          .getDefaultInstance();
    }

    // Builds the message; the uninitialized branch is unreachable in practice
    // for this proto3 message but kept for the Message.Builder contract.
    @java.lang.Override
    public com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse build() {
      com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds without the initialization check: repeated fields first, then any
    // singular fields whose presence bits are set.
    @java.lang.Override
    public com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse buildPartial() {
      com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse result =
          new com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the crawled_urls list, freezing the builder-owned list as
    // unmodifiable (bit 0x1 marks builder ownership of a mutable copy).
    private void buildPartialRepeatedFields(
        com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse result) {
      if (crawledUrlsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          crawledUrls_ = java.util.Collections.unmodifiableList(crawledUrls_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.crawledUrls_ = crawledUrls_;
      } else {
        result.crawledUrls_ = crawledUrlsBuilder_.build();
      }
    }

    // Transfers singular fields (bit 0x2 = next_page_token).
    private void buildPartial0(
        com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    // Dynamic-dispatch merge entry point: narrows to the typed overload when
    // the other message is a ListCrawledUrlsResponse, otherwise falls back to
    // the reflective field-by-field merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse) {
        return mergeFrom(
            (com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge following standard protobuf semantics: repeated
    // crawledUrls entries are appended, and a non-empty nextPageToken in
    // `other` overwrites this Builder's value.  Merging the default instance
    // is a no-op.
    public Builder mergeFrom(
        com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse other) {
      if (other
          == com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse
              .getDefaultInstance()) return this;
      if (crawledUrlsBuilder_ == null) {
        // Plain-list mode: if our list is still empty we can share `other`'s
        // immutable list directly (clearing the mutability bit); otherwise
        // copy-on-write and append.
        if (!other.crawledUrls_.isEmpty()) {
          if (crawledUrls_.isEmpty()) {
            crawledUrls_ = other.crawledUrls_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureCrawledUrlsIsMutable();
            crawledUrls_.addAll(other.crawledUrls_);
          }
          onChanged();
        }
      } else {
        // Field-builder mode: an empty builder is discarded so the incoming
        // list can be adopted wholesale; a non-empty one appends messages.
        if (!other.crawledUrls_.isEmpty()) {
          if (crawledUrlsBuilder_.isEmpty()) {
            crawledUrlsBuilder_.dispose();
            crawledUrlsBuilder_ = null;
            crawledUrls_ = other.crawledUrls_;
            bitField0_ = (bitField0_ & ~0x00000001);
            crawledUrlsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getCrawledUrlsFieldBuilder()
                    : null;
          } else {
            crawledUrlsBuilder_.addAllMessages(other.crawledUrls_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Wire-format parse loop.  Tag values are (field_number << 3) | wire_type:
    // 10 = field 1 (crawled_urls, length-delimited message), 18 = field 2
    // (next_page_token, length-delimited string); tag 0 means end of input.
    // Unknown fields are preserved via parseUnknownField.  On a malformed
    // stream the InvalidProtocolBufferException is unwrapped to its IO cause;
    // fields consumed before the failure are kept (onChanged in finally).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.websecurityscanner.v1beta.CrawledUrl m =
                    input.readMessage(
                        com.google.cloud.websecurityscanner.v1beta.CrawledUrl.parser(),
                        extensionRegistry);
                if (crawledUrlsBuilder_ == null) {
                  ensureCrawledUrlsIsMutable();
                  crawledUrls_.add(m);
                } else {
                  crawledUrlsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder state.  bitField0_ bit 0x1 = crawledUrls_ list is privately
    // owned and mutable; bit 0x2 = nextPageToken has been set.
    private int bitField0_;

    // Backing list for crawled_urls while no field builder is attached.  It
    // may alias an immutable list (the empty list, or one adopted from
    // another message during merge) until ensureCrawledUrlsIsMutable() runs.
    private java.util.List<com.google.cloud.websecurityscanner.v1beta.CrawledUrl> crawledUrls_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: replaces a shared/immutable crawledUrls_ with a
    // private ArrayList copy and marks it mutable (bit 0x1) before mutation.
    private void ensureCrawledUrlsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        crawledUrls_ =
            new java.util.ArrayList<com.google.cloud.websecurityscanner.v1beta.CrawledUrl>(
                crawledUrls_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily-created nested-builder support for crawled_urls; while null, the
    // plain crawledUrls_ list above is authoritative.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.websecurityscanner.v1beta.CrawledUrl,
            com.google.cloud.websecurityscanner.v1beta.CrawledUrl.Builder,
            com.google.cloud.websecurityscanner.v1beta.CrawledUrlOrBuilder>
        crawledUrlsBuilder_;
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public java.util.List<com.google.cloud.websecurityscanner.v1beta.CrawledUrl>
getCrawledUrlsList() {
if (crawledUrlsBuilder_ == null) {
return java.util.Collections.unmodifiableList(crawledUrls_);
} else {
return crawledUrlsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public int getCrawledUrlsCount() {
if (crawledUrlsBuilder_ == null) {
return crawledUrls_.size();
} else {
return crawledUrlsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public com.google.cloud.websecurityscanner.v1beta.CrawledUrl getCrawledUrls(int index) {
if (crawledUrlsBuilder_ == null) {
return crawledUrls_.get(index);
} else {
return crawledUrlsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public Builder setCrawledUrls(
int index, com.google.cloud.websecurityscanner.v1beta.CrawledUrl value) {
if (crawledUrlsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCrawledUrlsIsMutable();
crawledUrls_.set(index, value);
onChanged();
} else {
crawledUrlsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public Builder setCrawledUrls(
int index, com.google.cloud.websecurityscanner.v1beta.CrawledUrl.Builder builderForValue) {
if (crawledUrlsBuilder_ == null) {
ensureCrawledUrlsIsMutable();
crawledUrls_.set(index, builderForValue.build());
onChanged();
} else {
crawledUrlsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public Builder addCrawledUrls(com.google.cloud.websecurityscanner.v1beta.CrawledUrl value) {
if (crawledUrlsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCrawledUrlsIsMutable();
crawledUrls_.add(value);
onChanged();
} else {
crawledUrlsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public Builder addCrawledUrls(
int index, com.google.cloud.websecurityscanner.v1beta.CrawledUrl value) {
if (crawledUrlsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCrawledUrlsIsMutable();
crawledUrls_.add(index, value);
onChanged();
} else {
crawledUrlsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public Builder addCrawledUrls(
com.google.cloud.websecurityscanner.v1beta.CrawledUrl.Builder builderForValue) {
if (crawledUrlsBuilder_ == null) {
ensureCrawledUrlsIsMutable();
crawledUrls_.add(builderForValue.build());
onChanged();
} else {
crawledUrlsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public Builder addCrawledUrls(
int index, com.google.cloud.websecurityscanner.v1beta.CrawledUrl.Builder builderForValue) {
if (crawledUrlsBuilder_ == null) {
ensureCrawledUrlsIsMutable();
crawledUrls_.add(index, builderForValue.build());
onChanged();
} else {
crawledUrlsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public Builder addAllCrawledUrls(
java.lang.Iterable<? extends com.google.cloud.websecurityscanner.v1beta.CrawledUrl>
values) {
if (crawledUrlsBuilder_ == null) {
ensureCrawledUrlsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, crawledUrls_);
onChanged();
} else {
crawledUrlsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public Builder clearCrawledUrls() {
if (crawledUrlsBuilder_ == null) {
crawledUrls_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
crawledUrlsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public Builder removeCrawledUrls(int index) {
if (crawledUrlsBuilder_ == null) {
ensureCrawledUrlsIsMutable();
crawledUrls_.remove(index);
onChanged();
} else {
crawledUrlsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public com.google.cloud.websecurityscanner.v1beta.CrawledUrl.Builder getCrawledUrlsBuilder(
int index) {
return getCrawledUrlsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public com.google.cloud.websecurityscanner.v1beta.CrawledUrlOrBuilder getCrawledUrlsOrBuilder(
int index) {
if (crawledUrlsBuilder_ == null) {
return crawledUrls_.get(index);
} else {
return crawledUrlsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public java.util.List<? extends com.google.cloud.websecurityscanner.v1beta.CrawledUrlOrBuilder>
getCrawledUrlsOrBuilderList() {
if (crawledUrlsBuilder_ != null) {
return crawledUrlsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(crawledUrls_);
}
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public com.google.cloud.websecurityscanner.v1beta.CrawledUrl.Builder addCrawledUrlsBuilder() {
return getCrawledUrlsFieldBuilder()
.addBuilder(com.google.cloud.websecurityscanner.v1beta.CrawledUrl.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public com.google.cloud.websecurityscanner.v1beta.CrawledUrl.Builder addCrawledUrlsBuilder(
int index) {
return getCrawledUrlsFieldBuilder()
.addBuilder(
index, com.google.cloud.websecurityscanner.v1beta.CrawledUrl.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of CrawledUrls returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.CrawledUrl crawled_urls = 1;</code>
*/
public java.util.List<com.google.cloud.websecurityscanner.v1beta.CrawledUrl.Builder>
getCrawledUrlsBuilderList() {
return getCrawledUrlsFieldBuilder().getBuilderList();
}
    // Lazily creates the RepeatedFieldBuilderV3 for crawled_urls, seeding it
    // with the current plain list (and its mutability bit), then nulls out
    // crawledUrls_ so the builder becomes the single source of truth.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.websecurityscanner.v1beta.CrawledUrl,
            com.google.cloud.websecurityscanner.v1beta.CrawledUrl.Builder,
            com.google.cloud.websecurityscanner.v1beta.CrawledUrlOrBuilder>
        getCrawledUrlsFieldBuilder() {
      if (crawledUrlsBuilder_ == null) {
        crawledUrlsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.websecurityscanner.v1beta.CrawledUrl,
                com.google.cloud.websecurityscanner.v1beta.CrawledUrl.Builder,
                com.google.cloud.websecurityscanner.v1beta.CrawledUrlOrBuilder>(
                crawledUrls_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        crawledUrls_ = null;
      }
      return crawledUrlsBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse)
  // Singleton default (all-fields-unset) instance, created eagerly at class
  // load; toBuilder()/newBuilder() and merge logic compare against it.
  private static final com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse();
  }

  public static com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser used by the parseFrom(...) entry points: delegates to the
  // Builder's wire-format mergeFrom, attaching the partially-built message to
  // any InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<ListCrawledUrlsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListCrawledUrlsResponse>() {
        @java.lang.Override
        public ListCrawledUrlsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListCrawledUrlsResponse> parser() {
    return PARSER;
  }
@java.lang.Override
public com.google.protobuf.Parser<ListCrawledUrlsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== End of generated file ListCrawledUrlsResponse.java. ====
// The content below belongs to a separate generated source file:
// googleapis/google-cloud-java:
// java-websecurityscanner/proto-google-cloud-websecurityscanner-v1beta/src/main/java/com/google/cloud/websecurityscanner/v1beta/ListScanConfigsResponse.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/websecurityscanner/v1beta/web_security_scanner.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.websecurityscanner.v1beta;
/**
*
*
* <pre>
* Response for the `ListScanConfigs` method.
* </pre>
*
* Protobuf type {@code google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse}
*/
public final class ListScanConfigsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse)
ListScanConfigsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListScanConfigsResponse.newBuilder() to construct.
private ListScanConfigsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
  // No-arg constructor used only for the default instance and by
  // newInstance(); initializes fields to protobuf defaults (empty list /
  // empty string).
  private ListScanConfigsResponse() {
    scanConfigs_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  // Reflection hook used by the protobuf runtime to allocate instances; the
  // UnusedPrivateParameter distinguishes it from the real constructor.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListScanConfigsResponse();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1beta_ListScanConfigsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1beta_ListScanConfigsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse.class,
com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse.Builder.class);
}
public static final int SCAN_CONFIGS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.websecurityscanner.v1beta.ScanConfig> scanConfigs_;
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.websecurityscanner.v1beta.ScanConfig>
getScanConfigsList() {
return scanConfigs_;
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.websecurityscanner.v1beta.ScanConfigOrBuilder>
getScanConfigsOrBuilderList() {
return scanConfigs_;
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
@java.lang.Override
public int getScanConfigsCount() {
return scanConfigs_.size();
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.ScanConfig getScanConfigs(int index) {
return scanConfigs_.get(index);
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.ScanConfigOrBuilder getScanConfigsOrBuilder(
int index) {
return scanConfigs_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  // This message has no required fields, so the answer is always true once
  // computed.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes in field-number order: repeated scan_configs as field 1, then
  // next_page_token as field 2 only when non-empty (proto3 default elision),
  // followed by any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < scanConfigs_.size(); i++) {
      output.writeMessage(1, scanConfigs_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes and memoizes the serialized byte size (memoizedSize == -1 means
  // not yet computed); must mirror writeTo()'s field/elision logic exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < scanConfigs_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, scanConfigs_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all declared fields plus unknown fields; non
  // ListScanConfigsResponse arguments fall back to Object identity via
  // super.equals.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse other =
        (com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse) obj;
    if (!getScanConfigsList().equals(other.getScanConfigsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals(): mixes the descriptor, each
  // present field (repeated field contributes only when non-empty), and
  // unknown fields.  memoizedHashCode == 0 means not yet computed.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getScanConfigsCount() > 0) {
      hash = (37 * hash) + SCAN_CONFIGS_FIELD_NUMBER;
      hash = (53 * hash) + getScanConfigsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response for the `ListScanConfigs` method.
* </pre>
*
* Protobuf type {@code google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse)
com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1beta_ListScanConfigsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1beta_ListScanConfigsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse.class,
com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse.Builder.class);
}
// Construct using
// com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    // Resets the Builder to message defaults: empty scan_configs (either by
    // dropping the plain list or clearing the attached field builder), empty
    // next_page_token, and all has-bits cleared.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (scanConfigsBuilder_ == null) {
        scanConfigs_ = java.util.Collections.emptyList();
      } else {
        scanConfigs_ = null;
        scanConfigsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1beta_ListScanConfigsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse
getDefaultInstanceForType() {
return com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse build() {
com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse buildPartial() {
com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse result =
new com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse result) {
if (scanConfigsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
scanConfigs_ = java.util.Collections.unmodifiableList(scanConfigs_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.scanConfigs_ = scanConfigs_;
} else {
result.scanConfigs_ = scanConfigsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse) {
return mergeFrom(
(com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges another ListScanConfigsResponse into this builder following the
    // standard proto3 merge semantics: repeated fields are concatenated and
    // non-empty scalar fields overwrite.
    public Builder mergeFrom(
        com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse other) {
      if (other
          == com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse
              .getDefaultInstance()) return this;
      if (scanConfigsBuilder_ == null) {
        // List-backed mode: append other's elements to our list.
        if (!other.scanConfigs_.isEmpty()) {
          if (scanConfigs_.isEmpty()) {
            // Our list is empty: share other's immutable list and clear the
            // has-bit so it is not treated as a mutable copy.
            scanConfigs_ = other.scanConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureScanConfigsIsMutable();
            scanConfigs_.addAll(other.scanConfigs_);
          }
          onChanged();
        }
      } else {
        // Builder-backed mode.
        if (!other.scanConfigs_.isEmpty()) {
          if (scanConfigsBuilder_.isEmpty()) {
            // Builder holds nothing: dispose it and adopt other's list directly,
            // recreating the builder only if the runtime always uses builders.
            scanConfigsBuilder_.dispose();
            scanConfigsBuilder_ = null;
            scanConfigs_ = other.scanConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
            scanConfigsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getScanConfigsFieldBuilder()
                    : null;
          } else {
            scanConfigsBuilder_.addAllMessages(other.scanConfigs_);
          }
        }
      }
      // Non-empty string overwrites; empty string leaves ours untouched.
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      // Wire-format parse loop. Tag 10 = repeated scan_configs (field 1,
      // length-delimited); tag 18 = next_page_token (field 2, length-delimited).
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of input.
              done = true;
              break;
            case 10:
              {
                com.google.cloud.websecurityscanner.v1beta.ScanConfig m =
                    input.readMessage(
                        com.google.cloud.websecurityscanner.v1beta.ScanConfig.parser(),
                        extensionRegistry);
                if (scanConfigsBuilder_ == null) {
                  ensureScanConfigsIsMutable();
                  scanConfigs_.add(m);
                } else {
                  scanConfigsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                // Unknown fields are preserved; a false return means the tag
                // was an end-group marker, which also terminates the loop.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure so partially-read state is observed.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.util.List<com.google.cloud.websecurityscanner.v1beta.ScanConfig> scanConfigs_ =
java.util.Collections.emptyList();
    // Copy-on-write guard: the scan_configs_ list may alias an immutable or
    // shared list; bit 0x1 of bitField0_ records that we own a mutable copy.
    private void ensureScanConfigsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        scanConfigs_ =
            new java.util.ArrayList<com.google.cloud.websecurityscanner.v1beta.ScanConfig>(
                scanConfigs_);
        bitField0_ |= 0x00000001;
      }
    }
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.websecurityscanner.v1beta.ScanConfig,
com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder,
com.google.cloud.websecurityscanner.v1beta.ScanConfigOrBuilder>
scanConfigsBuilder_;
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public java.util.List<com.google.cloud.websecurityscanner.v1beta.ScanConfig>
getScanConfigsList() {
if (scanConfigsBuilder_ == null) {
return java.util.Collections.unmodifiableList(scanConfigs_);
} else {
return scanConfigsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public int getScanConfigsCount() {
if (scanConfigsBuilder_ == null) {
return scanConfigs_.size();
} else {
return scanConfigsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public com.google.cloud.websecurityscanner.v1beta.ScanConfig getScanConfigs(int index) {
if (scanConfigsBuilder_ == null) {
return scanConfigs_.get(index);
} else {
return scanConfigsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public Builder setScanConfigs(
int index, com.google.cloud.websecurityscanner.v1beta.ScanConfig value) {
if (scanConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureScanConfigsIsMutable();
scanConfigs_.set(index, value);
onChanged();
} else {
scanConfigsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public Builder setScanConfigs(
int index, com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder builderForValue) {
if (scanConfigsBuilder_ == null) {
ensureScanConfigsIsMutable();
scanConfigs_.set(index, builderForValue.build());
onChanged();
} else {
scanConfigsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public Builder addScanConfigs(com.google.cloud.websecurityscanner.v1beta.ScanConfig value) {
if (scanConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureScanConfigsIsMutable();
scanConfigs_.add(value);
onChanged();
} else {
scanConfigsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public Builder addScanConfigs(
int index, com.google.cloud.websecurityscanner.v1beta.ScanConfig value) {
if (scanConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureScanConfigsIsMutable();
scanConfigs_.add(index, value);
onChanged();
} else {
scanConfigsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public Builder addScanConfigs(
com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder builderForValue) {
if (scanConfigsBuilder_ == null) {
ensureScanConfigsIsMutable();
scanConfigs_.add(builderForValue.build());
onChanged();
} else {
scanConfigsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public Builder addScanConfigs(
int index, com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder builderForValue) {
if (scanConfigsBuilder_ == null) {
ensureScanConfigsIsMutable();
scanConfigs_.add(index, builderForValue.build());
onChanged();
} else {
scanConfigsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public Builder addAllScanConfigs(
java.lang.Iterable<? extends com.google.cloud.websecurityscanner.v1beta.ScanConfig>
values) {
if (scanConfigsBuilder_ == null) {
ensureScanConfigsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, scanConfigs_);
onChanged();
} else {
scanConfigsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public Builder clearScanConfigs() {
if (scanConfigsBuilder_ == null) {
scanConfigs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
scanConfigsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public Builder removeScanConfigs(int index) {
if (scanConfigsBuilder_ == null) {
ensureScanConfigsIsMutable();
scanConfigs_.remove(index);
onChanged();
} else {
scanConfigsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder getScanConfigsBuilder(
int index) {
return getScanConfigsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public com.google.cloud.websecurityscanner.v1beta.ScanConfigOrBuilder getScanConfigsOrBuilder(
int index) {
if (scanConfigsBuilder_ == null) {
return scanConfigs_.get(index);
} else {
return scanConfigsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public java.util.List<? extends com.google.cloud.websecurityscanner.v1beta.ScanConfigOrBuilder>
getScanConfigsOrBuilderList() {
if (scanConfigsBuilder_ != null) {
return scanConfigsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(scanConfigs_);
}
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder addScanConfigsBuilder() {
return getScanConfigsFieldBuilder()
.addBuilder(com.google.cloud.websecurityscanner.v1beta.ScanConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder addScanConfigsBuilder(
int index) {
return getScanConfigsFieldBuilder()
.addBuilder(
index, com.google.cloud.websecurityscanner.v1beta.ScanConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of ScanConfigs returned.
* </pre>
*
* <code>repeated .google.cloud.websecurityscanner.v1beta.ScanConfig scan_configs = 1;</code>
*/
public java.util.List<com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder>
getScanConfigsBuilderList() {
return getScanConfigsFieldBuilder().getBuilderList();
}
    // Lazily creates the RepeatedFieldBuilderV3 for scan_configs. Once created,
    // the builder takes ownership of the current list and scanConfigs_ is
    // nulled; all subsequent access goes through the builder.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.websecurityscanner.v1beta.ScanConfig,
            com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder,
            com.google.cloud.websecurityscanner.v1beta.ScanConfigOrBuilder>
        getScanConfigsFieldBuilder() {
      if (scanConfigsBuilder_ == null) {
        scanConfigsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.websecurityscanner.v1beta.ScanConfig,
                com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder,
                com.google.cloud.websecurityscanner.v1beta.ScanConfigOrBuilder>(
                scanConfigs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        scanConfigs_ = null;
      }
      return scanConfigsBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse)
private static final com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse();
}
public static com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListScanConfigsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListScanConfigsResponse>() {
@java.lang.Override
public ListScanConfigsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListScanConfigsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListScanConfigsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/commons-numbers | 38,456 | commons-numbers-examples/examples-jmh/src/main/java/org/apache/commons/numbers/examples/jmh/core/DoublePrecision.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.numbers.examples.jmh.core;
/**
* Computes double-length precision floating-point operations.
*
* <p>It is based on the 1971 paper
* <a href="https://doi.org/10.1007/BF01397083">
* Dekker (1971) A floating-point technique for extending the available precision</a>.
*/
final class DoublePrecision {
/*
* Caveat:
*
* The code below uses many additions/subtractions that may
* appear redundant. However, they should NOT be simplified, as they
* do use IEEE754 floating point arithmetic rounding properties.
*
* Algorithms are based on computing the product or sum of two values x and y in
* extended precision. The standard result is stored using a double (high part z) and
* the round-off error (or low part zz) is stored in a second double, e.g:
* x * y = (z, zz); z + zz = x * y
* x + y = (z, zz); z + zz = x + y
*
* To sum multiple (z, zz) results ideally the parts are sorted in order of
* non-decreasing magnitude and summed. This is exact if each number's most significant
* bit is below the least significant bit of the next (i.e. does not
* overlap). Creating non-overlapping parts requires a rebalancing
* of adjacent pairs using a summation z + zz = (z1, zz1) iteratively through the parts
* (see Shewchuk (1997) Grow-Expansion and Expansion-Sum [1]).
*
* [1] Shewchuk (1997): Arbitrary Precision Floating-Point Arithmetic
* http://www-2.cs.cmu.edu/afs/cs/project/quake/public/papers/robust-arithmetic.ps
*/
/**
* The multiplier used to split the double value into high and low parts. From
* Dekker (1971): "The constant should be chosen equal to 2^(p - p/2) + 1,
* where p is the number of binary digits in the mantissa". Here p is 53
* and the multiplier is {@code 2^27 + 1}.
*/
private static final double MULTIPLIER = 1.34217729E8;
/** The upper limit above which a number may overflow during the split into a high part.
* Assuming the multiplier is above 2^27 and the maximum exponent is 1023 then a safe
* limit is a value with an exponent of (1023 - 27) = 2^996. */
private static final double SAFE_UPPER = 0x1.0p996;
/** The lower limit for a product {@code x * y} below which the round-off component may be
* sub-normal. This is set as 2^-1022 * 2^54. */
private static final double SAFE_LOWER = 0x1.0p-968;
/** The scale to use when down-scaling during a split into a high part.
* This must be smaller than the inverse of the multiplier and a power of 2 for exact scaling. */
private static final double DOWN_SCALE = 0x1.0p-30;
/** The scale to use when re-scaling during a split into a high part.
* This is the inverse of {@link #DOWN_SCALE}. */
private static final double UP_SCALE = 0x1.0p30;
/** The upscale factor squared. */
private static final double UP_SCALE2 = 0x1.0p60;
/** The downscale factor squared. */
private static final double DOWN_SCALE2 = 0x1.0p-60;
/** The safe upper limit so the product {@code x * y} can be upscaled by 2^60. */
private static final double SAFE_UPPER_S = 0x1.0p963;
/** The mask to zero the lower 27-bits of a long . */
private static final long ZERO_LOWER_27_BITS = 0xffff_ffff_f800_0000L;
/** The mask to extract the raw 11-bit exponent.
* The value must be shifted 52-bits to remove the mantissa bits. */
private static final int EXP_MASK = 0x7ff;
/** The value 2046 converted for use if using {@link Integer#compareUnsigned(int, int)}.
* This requires adding {@link Integer#MIN_VALUE} to 2046. */
private static final int CMP_UNSIGNED_2046 = Integer.MIN_VALUE + 2046;
/** The value -1 converted for use if using {@link Integer#compareUnsigned(int, int)}.
* This requires adding {@link Integer#MIN_VALUE} to -1. */
private static final int CMP_UNSIGNED_MINUS_1 = Integer.MIN_VALUE - 1;
    /**
     * Represents a floating-point number with twice the precision of a {@code double}.
     *
     * <p>The represented value is the sum {@code hi + lo}. Instances are used as
     * mutable out-parameters: methods such as {@code multiplyUnscaled} overwrite
     * both fields rather than allocating a new object.
     */
    static final class Quad {
        // This is treated as a simple struct.
        // CHECKSTYLE: stop VisibilityModifier
        /** The high part of the number. */
        double hi;
        /** The low part of the number. */
        double lo;
        // CHECKSTYLE: resume VisibilityModifier
    }
/** Private constructor. */
private DoublePrecision() {
// intentionally empty.
}
/**
* Multiply the values {@code x} and {@code y} into a double-precision result {@code z}.
* It is assumed the numbers are normalized so no over/underflow will occurs.
*
* <p>Implements Dekker's mul12 method to split the numbers and multiply them
* in extended precision.
*
* <p>Note: The quad satisfies the condition {@code x * y == z.hi + z.lo}. The high
* part may be different from {@code x * y} by 1 ulp due to rounding.
*
* @param x First value
* @param y Second value
* @param z Result
*/
    static void multiplyUnscaled(double x, double y, Quad z) {
        // Note: The original mul12 algorithm avoids x * y and saves 1 multiplication.
        double p;
        // Dekker split of x: hx carries the upper ~26 bits, lx the remainder
        // (exactly x == hx + lx). Do NOT simplify "x - p + p" — it relies on
        // IEEE754 round-to-nearest discarding the low bits.
        p = x * MULTIPLIER;
        final double hx = x - p + p;
        final double lx = x - hx;
        // Dekker split of y, identically.
        p = y * MULTIPLIER;
        final double hy = y - p + p;
        final double ly = y - hy;
        // The four partial products are each exact; hi is the rounded product
        // and lo recovers the rounding error so that hi + lo == x * y exactly.
        p = hx * hy;
        final double q = hx * ly + lx * hy;
        z.hi = p + q;
        z.lo = p - z.hi + q + lx * ly;
    }
/**
* Multiply the values {@code x} and {@code y} into a double-precision result {@code c}.
* Scaling is performed on the numbers to protect against intermediate over/underflow.
*
* <p>The quadruple precision result has the standard double precision result
* {@code x * y} in the high part and the round-off in the low part,
*
* <p>Special cases:
*
* <ul>
* <li>If {@code x * y} is sub-normal or zero then the low part is 0.0.
* <li>If {@code x * y} is infinite or NaN then the low part is NaN.
* </ul>
*
* <p>Note: This does not represent the low part of infinity with zero. This is because the
* method is intended to be used for extended precision computations. The NaN low part
* signals that an extended precision computation using the result is invalid (i.e. the
* result of summation/multiplication of the parts will not be finite).
*
* @param x First value
* @param y Second value
* @param c Result
* @see DoublePrecision#productLowUnscaled(double, double, double)
*/
    static void multiply(double x, double y, Quad c) {
        // Special cases. Check the product.
        final double xy = x * y;
        if (isNotNormal(xy)) {
            c.hi = xy;
            // Returns 0.0 for sub-normal xy, otherwise NaN for inf/nan
            c.lo = xy - xy;
            return;
        }
        // Extract biased exponent and normalise.
        // Sub-normals are scaled by 2^54 and the exponent adjusted.
        // This is equivalent to the c function frexp which decomposes given floating
        // point value arg into a normalized fraction and an integral power of two.
        // Here we use a biased exponent as it is later adjusted when re-scaling.
        long xb = Double.doubleToRawLongBits(x);
        int xe = getBiasedExponent(xb);
        double xs;
        if (xe == 0) {
            // Sub-normal. Scale up and extract again
            xs = x * 0x1.0p54;
            xb = Double.doubleToRawLongBits(xs);
            xe = getBiasedExponent(xb) - 54;
        }
        xs = getNormalisedFraction(xb);
        long yb = Double.doubleToRawLongBits(y);
        int ye = getBiasedExponent(yb);
        double ys;
        if (ye == 0) {
            // Sub-normal. Scale up and extract again
            ys = y * 0x1.0p54;
            yb = Double.doubleToRawLongBits(ys);
            ye = getBiasedExponent(yb) - 54;
        }
        ys = getNormalisedFraction(yb);
        // Compute hi as x*y.
        // Thus if the standard precision result is finite (as verified in the initial test
        // on x * y) then the extended precision result will be.
        // Both fractions are in [0.5, 1) so the product is in [0.25, 1): no
        // overflow/underflow is possible in this unscaled step.
        double z = xs * ys;
        double zz = productLowUnscaled(xs, ys, z);
        // Re-scale. The result is currently in the range [0.25, 1) so no checks for
        // 0, nan, inf (the result exponent will be -2 or -1).
        // Both exponents are currently biased so subtract 1023 to get the biased scale.
        int scale = xe + ye - 1023;
        // Compute scaling by multiplication so we can scale both together.
        // If a single multiplication to a normal number then handle here.
        if (scale <= 2046 && scale > 0) {
            // Convert to a normalized power of 2
            final double d = Double.longBitsToDouble(((long) scale) << 52);
            z *= d;
            zz *= d;
        } else {
            // Delegate to java.util.Math
            // We have to adjust the biased scale to unbiased using the exponent offset 1023.
            // Math.scalb handles the out-of-range (sub-normal result) cases.
            scale -= 1023;
            z = Math.scalb(z, scale);
            zz = Math.scalb(zz, scale);
        }
        // Final result. The hi part should be same as the IEEE754 result.
        // assert z == xy;
        c.hi = z;
        c.lo = zz;
    }
/**
* Checks if the number is not normal. This is functionally equivalent to:
* <pre>
* final double abs = Math.abs(a);
* return (abs <= Double.MIN_NORMAL || !(absXy <= Double.MAX_VALUE));
* </pre>
*
* @param a The value.
* @return true if the value is not normal
*/
static boolean isNotNormal(double a) {
// Sub-normal numbers have a biased exponent of 0.
// Inf/NaN numbers have a biased exponent of 2047.
// Catch both cases by extracting the raw exponent, subtracting 1
// and compare unsigned (so 0 underflows to a large value).
final int baisedExponent = ((int) (Double.doubleToRawLongBits(a) >>> 52)) & EXP_MASK;
// Pre-compute the additions used by Integer.compareUnsigned
return baisedExponent + CMP_UNSIGNED_MINUS_1 >= CMP_UNSIGNED_2046;
}
/**
* Gets the exponent.
*
* @param bits the bits
* @return the exponent
*/
private static int getBiasedExponent(long bits) {
return (int)(bits >>> 52) & 0x7ff;
}
    /**
     * Gets the normalized fraction in the range [0.5, 1).
     *
     * @param bits the raw bits of a {@code double}
     * @return the normalized fraction
     */
    private static double getNormalisedFraction(long bits) {
        // Mask out the exponent and set it to 1022 (i.e. unbiased -1), keeping
        // the sign and mantissa bits, which yields a value in [0.5, 1).
        return Double.longBitsToDouble((bits & 0x800f_ffff_ffff_ffffL) | 0x3fe0_0000_0000_0000L);
    }
/**
* Implement Dekker's method to split a value into two parts. Multiplying by (2^s + 1) creates
* a big value from which to derive the two split parts.
* <pre>
* c = (2^s + 1) * a
* a_big = c - a
* a_hi = c - a_big
* a_lo = a - a_hi
* a = a_hi + a_lo
* </pre>
*
* <p>The multiplicand allows a p-bit value to be split into
* (p-s)-bit value {@code a_hi} and a non-overlapping (s-1)-bit value {@code a_lo}.
* Combined they have (p-1) bits of significand but the sign bit of {@code a_lo}
* contains a bit of information. The constant is chosen so that s is ceil(p/2) where
* the precision p for a double is 53-bits (1-bit of the mantissa is assumed to be
* 1 for a non sub-normal number) and s is 27.
*
* <p>This conversion uses scaling to avoid overflow in intermediate computations.
*
* <p>Splitting a NaN or infinite value will return NaN. Any finite value will return
* a finite value.
*
* @param value Value.
* @return the high part of the value.
*/
static double highPart(double value) {
// Avoid overflow
if (Math.abs(value) >= SAFE_UPPER) {
// Do scaling.
final double hi = highPartUnscaled(value * DOWN_SCALE) * UP_SCALE;
if (Double.isInfinite(hi)) {
// Number is too large.
// This occurs if value is infinite or close to Double.MAX_VALUE.
// Note that Dekker's split creates an approximating 26-bit number which may
// have an exponent 1 greater than the input value. This will overflow if the
// exponent is already +1023. Revert to the raw upper 26 bits of the 53-bit
// mantissa (including the assumed leading 1 bit). This conversion will result in
// the low part being a 27-bit significand and the potential loss of bits during
// addition and multiplication. (Contrast to the Dekker split which creates two
// 26-bit numbers with a bit of information moved to the sign of low.)
// The conversion will maintain Infinite in the high part where the resulting
// low part a_lo = a - a_hi = inf - inf = NaN.
return highPartSplit(value);
}
return hi;
}
// normal conversion
return highPartUnscaled(value);
}
/**
* Implement Dekker's method to split a value into two parts (see {@link #highPart(double)}).
*
* <p>This conversion does not use scaling and the result of overflow is NaN. Overflow
* may occur when the exponent of the input value is above 996.
*
* <p>Splitting a NaN or infinite value will return NaN.
*
* @param value Value.
* @return the high part of the value.
* @see Math#getExponent(double)
*/
    static double highPartUnscaled(double value) {
        final double c = MULTIPLIER * value;
        // Do NOT simplify to "value": the rounding of (c - value) is what
        // discards the low-order bits (Dekker's split).
        return c - (c - value);
    }
/**
* Implement a split using the upper and lower raw bits from the value.
*
* <p>Note: This method will not work for very small sub-normal numbers
* ({@code <= 27} bits) as the high part will be zero and the low part will
* have all the information. Methods that assume {@code hi > lo} will have
* undefined behaviour.
*
* <p>Splitting a NaN value will return NaN or infinite. Splitting an infinite
* value will return infinite. Any finite value will return a finite value.
*
* @param value Value.
* @return the high part of the value.
*/
static double highPartSplit(double value) {
return Double.longBitsToDouble(Double.doubleToRawLongBits(value) & ZERO_LOWER_27_BITS);
}
/**
* Compute the low part of the double length number {@code (z,zz)} for the exact
* product of {@code x} and {@code y}. This is equivalent to computing a {@code double}
* containing the magnitude of the rounding error when converting the exact 106-bit
* significand of the multiplication result to a 53-bit significand.
*
* <p>The method is written to be functionally similar to using a fused multiply add (FMA)
* operation to compute the low part, for example JDK 9's Math.fma function (note the sign
* change in the input argument for the product):
* <pre>
* double x = ...;
* double y = ...;
* double xy = x * y;
* double low1 = Math.fma(x, y, -xy);
* double low2 = DoublePrecision.productLow(x, y, xy);
* </pre>
*
* <p>Special cases:
*
* <ul>
* <li>If {@code x * y} is sub-normal or zero then the result is 0.0.
* <li>If {@code x * y} is infinite or NaN then the result is NaN.
* </ul>
*
* @param x First factor.
* @param y Second factor.
* @param xy Product of the factors (x * y).
* @return the low part of the product double length number
* @see #highPart(double)
* @see #productLow(double, double, double, double, double)
*/
    static double productLow(double x, double y, double xy) {
        // Verify the input. This must be NaN safe.
        //assert Double.compare(x * y, xy) == 0
        // If the number is sub-normal, inf or nan there is no round-off.
        if (isNotNormal(xy)) {
            // Returns 0.0 for sub-normal xy, otherwise NaN for inf/nan:
            return xy - xy;
        }
        // The result xy is finite and normal.
        // Use Dekker's mul12 algorithm that splits the values into high and low parts.
        // Dekker's split using multiplication will overflow if the value is within 2^27
        // of double max value. It can also produce 26-bit approximations that are larger
        // than the input numbers for the high part causing overflow in hx * hy when
        // x * y does not overflow. So we must scale down big numbers.
        // We only have to scale the largest number as we know the product does not overflow
        // (if one is too big then the other cannot be).
        // We also scale if the product is close to overflow to avoid intermediate overflow.
        // This could be done at a higher limit (e.g. Math.abs(xy) > Double.MAX_VALUE / 4)
        // but is included here to have a single low probability branch condition.
        // Add the absolute inputs for a single comparison. The sum will not be more than
        // 3-fold higher than any component.
        final double a = Math.abs(x);
        final double b = Math.abs(y);
        final double ab = Math.abs(xy);
        if (a + b + ab >= SAFE_UPPER) {
            // Only required to scale the largest number as x*y does not overflow.
            // DOWN_SCALE/UP_SCALE are exact powers of 2, so scaling is exact.
            if (a > b) {
                return productLowUnscaled(x * DOWN_SCALE, y, xy * DOWN_SCALE) * UP_SCALE;
            }
            return productLowUnscaled(x, y * DOWN_SCALE, xy * DOWN_SCALE) * UP_SCALE;
        }
        // The result is computed using a product of the low parts.
        // To avoid underflow in the low parts we note that these are approximately a factor
        // of 2^27 smaller than the original inputs so their product will be ~2^54 smaller
        // than the product xy. Ensure the product is at least 2^54 above a sub-normal.
        if (ab <= SAFE_LOWER) {
            // Scaling up here is safe: the largest magnitude cannot be above SAFE_LOWER / MIN_VALUE.
            return productLowUnscaled(x * UP_SCALE, y * UP_SCALE, xy * UP_SCALE2) * DOWN_SCALE2;
        }
        // No scaling required
        // (productLowUnscaled computes the FMA-equivalent error term x*y - xy.)
        return productLowUnscaled(x, y, xy);
    }
    /**
     * Compute the low part of the double length number {@code (z,zz)} for the exact
     * product of {@code x} and {@code y}. This is equivalent to computing a {@code double}
     * containing the magnitude of the rounding error when converting the exact 106-bit
     * significand of the multiplication result to a 53-bit significand.
     *
     * <p>This is a variant of {@code productLow(double, double, double)} that removes the
     * dedicated underflow branch: when no down-scaling is required the inputs are always
     * scaled up. It uses a lower overflow threshold ({@code SAFE_UPPER_S}) so the up-scaled
     * path cannot itself overflow.
     *
     * <p>The method is written to be functionally similar to using a fused multiply add (FMA)
     * operation to compute the low part, for example JDK 9's Math.fma function (note the sign
     * change in the input argument for the product):
     * <pre>
     * double x = ...;
     * double y = ...;
     * double xy = x * y;
     * double low1 = Math.fma(x, y, -xy);
     * double low2 = DoublePrecision.productLowS(x, y, xy);
     * </pre>
     *
     * <p>Special cases:
     *
     * <ul>
     *  <li>If {@code x * y} is sub-normal or zero then the result is 0.0.
     *  <li>If {@code x * y} is infinite or NaN then the result is NaN.
     * </ul>
     *
     * @param x First factor.
     * @param y Second factor.
     * @param xy Product of the factors (x * y).
     * @return the low part of the product double length number
     * @see #highPart(double)
     * @see #productLow(double, double, double, double, double)
     */
    static double productLowS(double x, double y, double xy) {
        // Verify the input. This must be NaN safe.
        //assert Double.compare(x * y, xy) == 0
        // If the number is sub-normal, inf or nan there is no round-off.
        if (isNotNormal(xy)) {
            // Returns 0.0 for sub-normal xy, otherwise NaN for inf/nan:
            return xy - xy;
        }
        // The result xy is finite and normal.
        // Use Dekker's mul12 algorithm that splits the values into high and low parts.
        // Dekker's split using multiplication will overflow if the value is within 2^27
        // of double max value. It can also produce 26-bit approximations that are larger
        // than the input numbers for the high part causing overflow in hx * hy when
        // x * y does not overflow. So we must scale down big numbers.
        // We only have to scale the largest number as we know the product does not overflow
        // (if one is too big then the other cannot be).
        // We also scale if the product is close to overflow to avoid intermediate overflow.
        // This could be done at a higher limit (e.g. Math.abs(xy) > Double.MAX_VALUE / 4)
        // but is included here to have a single low probability branch condition.
        // Add the absolute inputs for a single comparison. The sum will not be more than
        // 3-fold higher than any component.
        // Note: To drop a branch to check for upscaling, we use a lower threshold than
        // SAFE_UPPER in productLow
        final double a = Math.abs(x);
        final double b = Math.abs(y);
        if (a + b + Math.abs(xy) >= SAFE_UPPER_S) {
            // Only required to scale the largest number as x*y does not overflow.
            if (a > b) {
                return productLowUnscaled(x * DOWN_SCALE, y, xy * DOWN_SCALE) * UP_SCALE;
            }
            return productLowUnscaled(x, y * DOWN_SCALE, xy * DOWN_SCALE) * UP_SCALE;
        }
        // Scaling up here is safe
        return productLowUnscaled(x * UP_SCALE, y * UP_SCALE, xy * UP_SCALE2) * DOWN_SCALE2;
    }
    /**
     * Compute the low part of the double length number {@code (z,zz)} for the exact
     * product of {@code x} and {@code y}. This is equivalent to computing a {@code double}
     * containing the magnitude of the rounding error when converting the exact 106-bit
     * significand of the multiplication result to a 53-bit significand.
     *
     * <p>This variant omits the underflow up-scaling branch present in
     * {@code productLow(double, double, double)}; the low-part product may therefore
     * underflow for small normal products.
     *
     * <p>The method is written to be functionally similar to using a fused multiply add (FMA)
     * operation to compute the low part, for example JDK 9's Math.fma function (note the sign
     * change in the input argument for the product):
     * <pre>
     * double x = ...;
     * double y = ...;
     * double xy = x * y;
     * double low1 = Math.fma(x, y, -xy);
     * double low2 = DoublePrecision.productLow0(x, y, xy);
     * </pre>
     *
     * <p>Special cases:
     *
     * <ul>
     *  <li>If {@code x * y} is sub-normal or zero then the result is 0.0.
     *  <li>If {@code x * y} is infinite or NaN then the result is NaN.
     * </ul>
     *
     * @param x First factor.
     * @param y Second factor.
     * @param xy Product of the factors (x * y).
     * @return the low part of the product double length number
     * @see #highPart(double)
     * @see #productLow(double, double, double, double, double)
     */
    static double productLow0(double x, double y, double xy) {
        // Verify the input. This must be NaN safe.
        //assert Double.compare(x * y, xy) == 0
        // If the number is sub-normal, inf or nan there is no round-off.
        if (isNotNormal(xy)) {
            // Returns 0.0 for sub-normal xy, otherwise NaN for inf/nan:
            return xy - xy;
        }
        // The result xy is finite and normal.
        // Use Dekker's mul12 algorithm that splits the values into high and low parts.
        // Dekker's split using multiplication will overflow if the value is within 2^27
        // of double max value. It can also produce 26-bit approximations that are larger
        // than the input numbers for the high part causing overflow in hx * hy when
        // x * y does not overflow. So we must scale down big numbers.
        // We only have to scale the largest number as we know the product does not overflow
        // (if one is too big then the other cannot be).
        // We also scale if the product is close to overflow to avoid intermediate overflow.
        // This could be done at a higher limit (e.g. Math.abs(xy) > Double.MAX_VALUE / 4)
        // but is included here to have a single low probability branch condition.
        // Add the absolute inputs for a single comparison. The sum will not be more than
        // 3-fold higher than any component.
        final double a = Math.abs(x);
        final double b = Math.abs(y);
        if (a + b + Math.abs(xy) >= SAFE_UPPER) {
            // Only required to scale the largest number as x*y does not overflow.
            if (a > b) {
                return productLowUnscaled(x * DOWN_SCALE, y, xy * DOWN_SCALE) * UP_SCALE;
            }
            return productLowUnscaled(x, y * DOWN_SCALE, xy * DOWN_SCALE) * UP_SCALE;
        }
        // No scaling required
        return productLowUnscaled(x, y, xy);
    }
    /**
     * Compute the low part of the double length number {@code (z,zz)} for the exact
     * product of {@code x} and {@code y}. This is equivalent to computing a {@code double}
     * containing the magnitude of the rounding error when converting the exact 106-bit
     * significand of the multiplication result to a 53-bit significand.
     *
     * <p>This variant performs no check for sub-normal or non-finite {@code xy};
     * compared with {@code productLow(double, double, double)} it skips the
     * {@code isNotNormal} guard and the underflow branch.
     *
     * <p>Special cases:
     *
     * <ul>
     *  <li>If {@code x * y} is sub-normal or zero then the result is 0.0.
     *  <li>If {@code x * y} is infinite, and {@code x} and {@code y} are finite then the
     *      result is the opposite infinity.
     *  <li>If {@code x} or {@code y} are infinite then the result is NaN.
     *  <li>If {@code x * y} is NaN then the result is NaN.
     * </ul>
     *
     * @param x First factor.
     * @param y Second factor.
     * @param xy Product of the factors (x * y).
     * @return the low part of the product double length number
     * @see #highPart(double)
     * @see #productLow(double, double, double, double, double)
     */
    static double productLow1(double x, double y, double xy) {
        // Verify the input. This must be NaN safe.
        //assert Double.compare(x * y, xy) == 0
        // Logic as per productLow but with no check for sub-normal or NaN.
        final double a = Math.abs(x);
        final double b = Math.abs(y);
        if (a + b + Math.abs(xy) >= SAFE_UPPER) {
            // Only required to scale the largest number as x*y does not overflow.
            if (a > b) {
                return productLowUnscaled(x * DOWN_SCALE, y, xy * DOWN_SCALE) * UP_SCALE;
            }
            return productLowUnscaled(x, y * DOWN_SCALE, xy * DOWN_SCALE) * UP_SCALE;
        }
        // No scaling required
        return productLowUnscaled(x, y, xy);
    }
    /**
     * Compute the low part of the double length number {@code (z,zz)} for the exact
     * product of {@code x} and {@code y}. This is equivalent to computing a {@code double}
     * containing the magnitude of the rounding error when converting the exact 106-bit
     * significand of the multiplication result to a 53-bit significand.
     *
     * <p>This variant tests each input against the overflow threshold with separate
     * branches rather than the single summed comparison used by
     * {@code productLow(double, double, double)}; it has no underflow branch.
     *
     * <p>The method is written to be functionally similar to using a fused multiply add (FMA)
     * operation to compute the low part, for example JDK 9's Math.fma function (note the sign
     * change in the input argument for the product):
     * <pre>
     * double x = ...;
     * double y = ...;
     * double xy = x * y;
     * double low1 = Math.fma(x, y, -xy);
     * double low2 = DoublePrecision.productLow2(x, y, xy);
     * </pre>
     *
     * <p>Special cases:
     *
     * <ul>
     *  <li>If {@code x * y} is sub-normal or zero then the result is 0.0.
     *  <li>If {@code x * y} is infinite or NaN then the result is NaN.
     * </ul>
     *
     * @param x First factor.
     * @param y Second factor.
     * @param xy Product of the factors (x * y).
     * @return the low part of the product double length number
     * @see #highPart(double)
     * @see #productLow(double, double, double, double, double)
     */
    static double productLow2(double x, double y, double xy) {
        // Verify the input. This must be NaN safe.
        //assert Double.compare(x * y, xy) == 0
        // If the number is sub-normal, inf or nan there is no round-off.
        if (isNotNormal(xy)) {
            // Returns 0.0 for sub-normal xy, otherwise NaN for inf/nan:
            return xy - xy;
        }
        // The result xy is finite and normal.
        // Use Dekker's mul12 algorithm that splits the values into high and low parts.
        // Dekker's split using multiplication will overflow if the value is within 2^27
        // of double max value. It can also produce 26-bit approximations that are larger
        // than the input numbers for the high part causing overflow in hx * hy when
        // x * y does not overflow. So we must scale down big numbers.
        // We only have to scale the largest number as we know the product does not overflow
        // (if one is too big then the other cannot be).
        // Also scale if the product is close to max value.
        if (Math.abs(x) >= SAFE_UPPER) {
            return productLowUnscaled(x * DOWN_SCALE, y, xy * DOWN_SCALE) * UP_SCALE;
        }
        if (Math.abs(y) >= SAFE_UPPER || Math.abs(xy) >= Double.MAX_VALUE / 4) {
            return productLowUnscaled(x, y * DOWN_SCALE, xy * DOWN_SCALE) * UP_SCALE;
        }
        // No scaling required
        return productLowUnscaled(x, y, xy);
    }
/**
* Compute the low part of the double length number {@code (z,zz)} for the exact
* product of {@code x} and {@code y} using Dekker's mult12 algorithm. The standard
* precision product {@code x*y} must be provided. The numbers {@code x} and {@code y}
* are split into high and low parts using Dekker's algorithm.
*
* <p>This method performs scaling in Dekker's split for large finite numbers to avoid
* overflow when generating the high part of the number.
*
* <p>Warning: Dekker's split can produce high parts that are larger in magnitude than
* the input number as the high part is a 26-bit approximation of the number. Thus it is
* possible that the standard product {@code x * y} does not overflow but the extended
* precision sub-product {@code hx * hy} does overflow. This method should not be
* considered safe for all combinations where {@code Double.isFinite(x * y)} is true.
* The method is used for benchmarking.
*
* @param x First factor.
* @param y Second factor.
* @param xy Product of the factors (x * y).
* @return the low part of the product double length number
* @see #highPart(double)
* @see #productLow(double, double, double, double, double)
*/
static double productLow3(double x, double y, double xy) {
// Split the numbers using Dekker's algorithm
final double hx = highPart(x);
final double lx = x - hx;
final double hy = highPart(y);
final double ly = y - hy;
return productLow(hx, lx, hy, ly, xy);
}
/**
* Compute the low part of the double length number {@code (z,zz)} for the
* product of {@code x} and {@code y} using a Dekker's mult12 algorithm. The
* standard precision product {@code x*y} must be provided. The numbers
* {@code x} and {@code y} are split into high and low parts by zeroing the
* lower 27-bits of the mantissa to create the high part. This may lose 1 bit of
* precision in the resulting low part computed by subtraction. The intermediate
* computations will not overflow as the split results are always smaller in
* magnitude than the input numbers.
*
* <p>The method is used for benchmarking as results may not be exact due to
* loss of a bit during splitting of the input factors.
*
* @param x First factor.
* @param y Second factor.
* @param xy Product of the factors (x * y).
* @return the low part of the product double length number
* @see #highPart(double)
* @see #productLow(double, double, double, double, double)
*/
static double productLowSplit(double x, double y, double xy) {
// Split the numbers using Dekker's algorithm
final double hx = highPartSplit(x);
final double lx = x - hx;
final double hy = highPartSplit(y);
final double ly = y - hy;
return productLow(hx, lx, hy, ly, xy);
}
/**
* Compute the low part of the double length number {@code (z,zz)} for the exact
* product of {@code x} and {@code y} using Dekker's mult12 algorithm. The standard
* precision product {@code x*y} must be provided. The numbers {@code x} and {@code y}
* are split into high and low parts using Dekker's algorithm.
*
* <p>Warning: This method does not perform scaling in Dekker's split and large
* finite numbers can create NaN results.
*
* @param x First factor.
* @param y Second factor.
* @param xy Product of the factors (x * y).
* @return the low part of the product double length number
* @see #highPartUnscaled(double)
* @see #productLow(double, double, double, double, double)
*/
static double productLowUnscaled(double x, double y, double xy) {
// Split the numbers using Dekker's algorithm without scaling
final double hx = highPartUnscaled(x);
final double lx = x - hx;
final double hy = highPartUnscaled(y);
final double ly = y - hy;
return productLow(hx, lx, hy, ly, xy);
}
/**
* Compute the low part of the double length number {@code (z,zz)} for the exact
* product of {@code x} and {@code y} using Dekker's mult12 algorithm. The standard
* precision product {@code x*y} must be provided. The numbers {@code x} and {@code y}
* should already be split into low and high parts.
*
* <p>Note: This uses the high part of the result {@code (z,zz)} as {@code x * y} and not
* {@code hx * hy + hx * ty + tx * hy} as specified in Dekker's original paper.
* See Shewchuk (1997) for working examples.
*
* @param hx High part of first factor.
* @param lx Low part of first factor.
* @param hy High part of second factor.
* @param ly Low part of second factor.
* @param xy Product of the factors.
* @return <code>lx * ly - (((xy - hx * hy) - lx * hy) - hx * ly)</code>
* @see <a href="http://www-2.cs.cmu.edu/afs/cs/project/quake/public/papers/robust-arithmetic.ps">
* Shewchuk (1997) Theorum 18</a>
*/
static double productLow(double hx, double lx, double hy, double ly, double xy) {
// Compute the multiply low part:
// err1 = xy - hx * hy
// err2 = err1 - lx * hy
// err3 = err2 - hx * ly
// low = lx * ly - err3
return lx * ly - (((xy - hx * hy) - lx * hy) - hx * ly);
}
/**
* Compute the round-off {@code s} from the sum of two split numbers {@code (x, xx)}
* and {@code (y, yy)} using Dekker's add2 algorithm. The values are not required to be
* ordered by magnitude as an absolute comparison is made to determine the summation order.
* The sum of the high parts {@code r} must be provided.
*
* <p>The result {@code (r, s)} must be re-balanced to create the split result {@code (z, zz)}:
* <pre>
* z = r + s
* zz = r - z + s
* </pre>
*
* @param x High part of first number.
* @param xx Low part of first number.
* @param y High part of second number.
* @param yy Low part of second number.
* @param r Sum of the parts (x + y) = r
* @return The round-off from the sum (x + y) = s
*/
static double sumLow(double x, double xx, double y, double yy, double r) {
return Math.abs(x) > Math.abs(y) ?
x - r + y + yy + xx :
y - r + x + xx + yy;
}
/**
* Compute the round-off from the sum of two numbers {@code a} and {@code b} using
* Dekker's two-sum algorithm. The values are required to be ordered by magnitude
* {@code |a| >= |b|}. The standard precision sum must be provided.
*
* @param a First part of sum.
* @param b Second part of sum.
* @param sum Sum of the parts (a + b).
* @return <code>b - (sum - a)</code>
* @see <a href="http://www-2.cs.cmu.edu/afs/cs/project/quake/public/papers/robust-arithmetic.ps">
* Shewchuk (1997) Theorum 6</a>
*/
static double fastTwoSumLow(double a, double b, double sum) {
// bVirtual = sum - a
// b - bVirtual == b round-off
return b - (sum - a);
}
/**
* Compute the round-off from the sum of two numbers {@code a} and {@code b} using
* Knuth's two-sum algorithm. The values are not required to be ordered by magnitude.
* The standard precision sum must be provided.
*
* @param a First part of sum.
* @param b Second part of sum.
* @param sum Sum of the parts (a + b).
* @return <code>(b - (sum - (sum - b))) + (a - (sum - b))</code>
* @see <a href="http://www-2.cs.cmu.edu/afs/cs/project/quake/public/papers/robust-arithmetic.ps">
* Shewchuk (1997) Theorum 7</a>
*/
static double twoSumLow(double a, double b, double sum) {
final double bVirtual = sum - a;
// sum - bVirtual == aVirtual.
// a - aVirtual == a round-off
// b - bVirtual == b round-off
return (a - (sum - bVirtual)) + (b - bVirtual);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.metastore;
import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_CATALOG_NAME;
import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.repl.ReplConst;
import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
import org.apache.hadoop.hive.metastore.api.Catalog;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder;
import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
import org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder;
import org.apache.hadoop.hive.metastore.client.builder.TableBuilder;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
import org.apache.hadoop.hive.metastore.leader.HouseKeepingTasks;
import org.apache.hadoop.hive.metastore.leader.StaticLeaderElection;
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.utils.TestTxnDbUtil;
import org.apache.thrift.TException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import com.google.common.collect.Lists;
@Category(MetastoreUnitTest.class)
public class TestPartitionManagement {
private IMetaStoreClient client;
private Configuration conf;
  @Before
  public void setUp() throws Exception {
    // Build a fresh metastore configuration for each test.
    conf = MetastoreConf.newMetastoreConf();
    // Use the Msck expression proxy so partition expressions can be evaluated
    // without a full query engine on the classpath.
    conf.setClass(MetastoreConf.ConfVars.EXPRESSION_PROXY_CLASS.getVarname(),
        MsckPartitionExpressionProxy.class, PartitionExpressionProxy.class);
    // Disable the metadata transformer (blank class name).
    MetastoreConf.setVar(conf, ConfVars.METASTORE_METADATA_TRANSFORMER_CLASS, " ");
    MetaStoreTestUtils.setConfForStandloneMode(conf);
    conf.setBoolean(ConfVars.MULTITHREADED.getVarname(), false);
    conf.setBoolean(ConfVars.HIVE_IN_TEST.getVarname(), true);
    // Start an embedded metastore, then prepare the transactional DB schema
    // before connecting the client.
    MetaStoreTestUtils.startMetaStoreWithRetry(HadoopThriftAuthBridge.getBridge(), conf);
    TestTxnDbUtil.setConfValues(conf);
    TestTxnDbUtil.prepDb(conf);
    client = new HiveMetaStoreClient(conf);
  }
@After
public void tearDown() throws Exception {
if (client != null) {
// Drop any left over catalogs
List<String> catalogs = client.getCatalogs();
for (String catName : catalogs) {
if (!catName.equalsIgnoreCase(DEFAULT_CATALOG_NAME)) {
// First drop any databases in catalog
List<String> databases = client.getAllDatabases(catName);
for (String db : databases) {
client.dropDatabase(catName, db, true, false, true);
}
client.dropCatalog(catName);
} else {
List<String> databases = client.getAllDatabases(catName);
for (String db : databases) {
if (!db.equalsIgnoreCase(Warehouse.DEFAULT_DATABASE_NAME)) {
client.dropDatabase(catName, db, true, false, true);
}
}
}
}
}
try {
if (client != null) {
client.close();
}
} finally {
client = null;
}
}
private Map<String, Column> buildAllColumns() {
Map<String, Column> colMap = new HashMap<>(6);
Column[] cols = {new Column("b", "binary"), new Column("bo", "boolean"),
new Column("d", "date"), new Column("do", "double"), new Column("l", "bigint"),
new Column("s", "string")};
for (Column c : cols) {
colMap.put(c.colName, c);
}
return colMap;
}
  /**
   * Creates a catalog (if not the default), database (if not the default), table and
   * partitions for a test.
   *
   * @param catName catalog name; created unless it is the default catalog
   * @param dbName database name; created unless it is the default database
   * @param tableName table to create
   * @param partKeys partition column names, or null for an unpartitioned table
   * @param partKeyTypes partition column types, parallel to partKeys
   * @param partVals one list of partition values per partition to create
   * @param colMap regular columns to add to the table
   * @param isOrc whether to configure ORC input/output formats
   * @return list of "key=value" partition name fragments (see NOTE below)
   * @throws TException on any metastore failure
   */
  private List<String> createMetadata(String catName, String dbName, String tableName,
      List<String> partKeys, List<String> partKeyTypes, List<List<String>> partVals,
      Map<String, Column> colMap, boolean isOrc)
      throws TException {
    // Create a non-default catalog with its own warehouse directory.
    if (!DEFAULT_CATALOG_NAME.equals(catName)) {
      Catalog cat = new CatalogBuilder()
          .setName(catName)
          .setLocation(MetaStoreTestUtils.getTestWarehouseDir(catName))
          .build();
      client.createCatalog(cat);
    }
    Database db;
    if (!DEFAULT_DATABASE_NAME.equals(dbName)) {
      DatabaseBuilder dbBuilder = new DatabaseBuilder()
          .setName(dbName);
      dbBuilder.setCatalogName(catName);
      db = dbBuilder.create(client, conf);
    } else {
      db = client.getDatabase(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME);
    }
    TableBuilder tb = new TableBuilder()
        .inDb(db)
        .setTableName(tableName);
    if (isOrc) {
      tb.setInputFormat("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat")
          .setOutputFormat("org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat");
    }
    for (Column col : colMap.values()) {
      tb.addCol(col.colName, col.colType);
    }
    // Validate the partition-key arguments before building a partitioned table.
    if (partKeys != null) {
      if (partKeyTypes == null) {
        throw new IllegalArgumentException("partKeyTypes cannot be null when partKeys is non-null");
      }
      if (partKeys.size() != partKeyTypes.size()) {
        throw new IllegalArgumentException("partKeys and partKeyTypes size should be same");
      }
      if (partVals.isEmpty()) {
        throw new IllegalArgumentException("partVals cannot be empty for patitioned table");
      }
      for (int i = 0; i < partKeys.size(); i++) {
        tb.addPartCol(partKeys.get(i), partKeyTypes.get(i));
      }
    }
    Table table = tb.create(client, conf);
    // Each entry of partVals is one partition's full value tuple.
    if (partKeys != null) {
      for (List<String> partVal : partVals) {
        new PartitionBuilder()
            .inTable(table)
            .setValues(partVal)
            .addToTable(client, conf);
      }
    }
    List<String> partNames = new ArrayList<>();
    if (partKeys != null) {
      // NOTE(review): this pairs partKeys.get(i) with partVals.get(i), i.e. the i-th
      // key with the i-th partition's value tuple, which does not match how the
      // partitions were created above (partVals is per-partition, not per-key).
      // The visible callers ignore the return value; confirm intent before relying
      // on these names.
      for (int i = 0; i < partKeys.size(); i++) {
        String partKey = partKeys.get(i);
        for (String partVal : partVals.get(i)) {
          String partName = partKey + "=" + partVal;
          partNames.add(partName);
        }
      }
    }
    client.flushCache();
    return partNames;
  }
@Test
public void testPartitionDiscoveryDisabledByDefault() throws TException, IOException {
String dbName = "db1";
String tableName = "tbl1";
Map<String, Column> colMap = buildAllColumns();
List<String> partKeys = Lists.newArrayList("state", "dt");
List<String> partKeyTypes = Lists.newArrayList("string", "date");
List<List<String>> partVals = Lists.newArrayList(
Lists.newArrayList("__HIVE_DEFAULT_PARTITION__", "1990-01-01"),
Lists.newArrayList("CA", "1986-04-28"),
Lists.newArrayList("MN", "2018-11-31"));
createMetadata(DEFAULT_CATALOG_NAME, dbName, tableName, partKeys, partKeyTypes, partVals, colMap, false);
Table table = client.getTable(dbName, tableName);
List<Partition> partitions = client.listPartitions(dbName, tableName, (short) -1);
assertEquals(3, partitions.size());
String tableLocation = table.getSd().getLocation();
URI location = URI.create(tableLocation);
Path tablePath = new Path(location);
FileSystem fs = FileSystem.get(location, conf);
fs.mkdirs(new Path(tablePath, "state=WA/dt=2018-12-01"));
fs.mkdirs(new Path(tablePath, "state=UT/dt=2018-12-02"));
assertEquals(5, fs.listStatus(tablePath).length);
partitions = client.listPartitions(dbName, tableName, (short) -1);
assertEquals(3, partitions.size());
// partition discovery is not enabled via table property, so nothing should change on this table
runPartitionManagementTask(conf);
partitions = client.listPartitions(dbName, tableName, (short) -1);
assertEquals(3, partitions.size());
// table property is set to false, so no change expected
table.getParameters().put(PartitionManagementTask.DISCOVER_PARTITIONS_TBLPROPERTY, "false");
client.alter_table(dbName, tableName, table);
runPartitionManagementTask(conf);
partitions = client.listPartitions(dbName, tableName, (short) -1);
assertEquals(3, partitions.size());
}
  // Verifies partition discovery for both MANAGED and EXTERNAL tables, including
  // the PARTITION_MANAGEMENT_TABLE_TYPES filter and disabling the task entirely
  // via a zero task frequency.
  @Test
  public void testPartitionDiscoveryEnabledBothTableTypes() throws Exception {
    String dbName = "db2";
    String tableName = "tbl2";
    Map<String, Column> colMap = buildAllColumns();
    List<String> partKeys = Lists.newArrayList("state", "dt");
    List<String> partKeyTypes = Lists.newArrayList("string", "date");
    List<List<String>> partVals = Lists.newArrayList(
        Lists.newArrayList("__HIVE_DEFAULT_PARTITION__", "1990-01-01"),
        Lists.newArrayList("CA", "1986-04-28"),
        Lists.newArrayList("MN", "2018-11-31"));
    createMetadata(DEFAULT_CATALOG_NAME, dbName, tableName, partKeys, partKeyTypes, partVals, colMap, false);
    Table table = client.getTable(dbName, tableName);
    List<Partition> partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    String tableLocation = table.getSd().getLocation();
    URI location = URI.create(tableLocation);
    Path tablePath = new Path(location);
    FileSystem fs = FileSystem.get(location, conf);
    // Create two partition directories directly on the filesystem; the metastore
    // does not know about them yet.
    Path newPart1 = new Path(tablePath, "state=WA/dt=2018-12-01");
    Path newPart2 = new Path(tablePath, "state=UT/dt=2018-12-02");
    fs.mkdirs(newPart1);
    fs.mkdirs(newPart2);
    assertEquals(5, fs.listStatus(tablePath).length);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    // table property is set to true, we expect 5 partitions
    table.getParameters().put(PartitionManagementTask.DISCOVER_PARTITIONS_TBLPROPERTY, "true");
    client.alter_table(dbName, tableName, table);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(5, partitions.size());
    // change table type to external, delete a partition directory and make sure partition discovery works
    table.getParameters().put("EXTERNAL", "true");
    table.setTableType(TableType.EXTERNAL_TABLE.name());
    client.alter_table(dbName, tableName, table);
    boolean deleted = fs.delete(newPart1.getParent(), true);
    assertTrue(deleted);
    assertEquals(4, fs.listStatus(tablePath).length);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(4, partitions.size());
    // remove external tables from partition discovery and expect no changes even after partition is deleted
    conf.set(MetastoreConf.ConfVars.PARTITION_MANAGEMENT_TABLE_TYPES.getVarname(), TableType.MANAGED_TABLE.name());
    deleted = fs.delete(newPart2.getParent(), true);
    assertTrue(deleted);
    assertEquals(3, fs.listStatus(tablePath).length);
    // this doesn't remove partition because table is still external and we have remove external table type from
    // partition discovery
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(4, partitions.size());
    // no table types specified, msck will not select any tables
    conf.set(MetastoreConf.ConfVars.PARTITION_MANAGEMENT_TABLE_TYPES.getVarname(), "");
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(4, partitions.size());
    // only EXTERNAL table type, msck should drop a partition now
    conf.set(MetastoreConf.ConfVars.PARTITION_MANAGEMENT_TABLE_TYPES.getVarname(), TableType.EXTERNAL_TABLE.name());
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    // only MANAGED table type
    conf.set(MetastoreConf.ConfVars.PARTITION_MANAGEMENT_TABLE_TYPES.getVarname(), TableType.MANAGED_TABLE.name());
    table.getParameters().remove("EXTERNAL");
    table.setTableType(TableType.MANAGED_TABLE.name());
    client.alter_table(dbName, tableName, table);
    Assert.assertTrue(fs.mkdirs(newPart1));
    Assert.assertTrue(fs.mkdirs(newPart2));
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(5, partitions.size());
    Assert.assertTrue(fs.delete(newPart1, true));
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(4, partitions.size());
    // disable partition management task by default. Currently, there are 4 directories
    // this test adds two additional paths and verifies that partitions are not added to
    // metastore when partition management task is disabled.
    Assert.assertTrue(fs.mkdirs(new Path(tablePath, "state=AZ/dt=2025-07-01")));
    Assert.assertTrue(fs.mkdirs(new Path(tablePath, "state=NV/dt=2025-07-02")));
    assertEquals(6, fs.listStatus(tablePath).length);
    conf.set(MetastoreConf.ConfVars.PARTITION_MANAGEMENT_TASK_FREQUENCY.getVarname(), "0");
    conf.set(MetastoreConf.ConfVars.TASK_THREADS_REMOTE_ONLY.getVarname(),
        "org.apache.hadoop.hive.metastore.PartitionManagementTask");
    // Run the housekeeping path directly (as the leader) with PMT frequency 0:
    // the task must be skipped and the partition count must stay at 4.
    HouseKeepingTasks listener = new HouseKeepingTasks(conf, true);
    StaticLeaderElection election = new StaticLeaderElection();
    election.setName("TestPartitionManagement");
    listener.takeLeadership(election);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(4, partitions.size());
    // Re-enable PMT and verify 6 partitions
    conf.set(MetastoreConf.ConfVars.PARTITION_MANAGEMENT_TASK_FREQUENCY.getVarname(), "1");
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(6, partitions.size());
  }
/**
 * Verifies that partition discovery honors the configured catalog: a table in a non-default
 * catalog ("cat3") is only repaired once PARTITION_MANAGEMENT_CATALOG_NAME points at that catalog.
 */
@Test
public void testPartitionDiscoveryNonDefaultCatalog() throws TException, IOException {
    String catName = "cat3";
    String dbName = "db3";
    String tableName = "tbl3";
    Map<String, Column> colMap = buildAllColumns();
    List<String> partKeys = Lists.newArrayList("state", "dt");
    List<String> partKeyTypes = Lists.newArrayList("string", "date");
    List<List<String>> partVals = Lists.newArrayList(
        Lists.newArrayList("__HIVE_DEFAULT_PARTITION__", "1990-01-01"),
        Lists.newArrayList("CA", "1986-04-28"),
        Lists.newArrayList("MN", "2018-11-31"));
    createMetadata(catName, dbName, tableName, partKeys, partKeyTypes, partVals, colMap, false);
    Table table = client.getTable(catName, dbName, tableName);
    List<Partition> partitions = client.listPartitions(catName, dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    String tableLocation = table.getSd().getLocation();
    URI location = URI.create(tableLocation);
    Path tablePath = new Path(location);
    FileSystem fs = FileSystem.get(location, conf);
    // Create two partition directories directly on the filesystem; metastore does not know them yet.
    Path newPart1 = new Path(tablePath, "state=WA/dt=2018-12-01");
    Path newPart2 = new Path(tablePath, "state=UT/dt=2018-12-02");
    fs.mkdirs(newPart1);
    fs.mkdirs(newPart2);
    assertEquals(5, fs.listStatus(tablePath).length);
    table.getParameters().put(PartitionManagementTask.DISCOVER_PARTITIONS_TBLPROPERTY, "true");
    client.alter_table(catName, dbName, tableName, table);
    // default catalog in conf is 'hive' but we are using 'cat3' as catName for this test, so msck should not fix
    // anything for this one
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(catName, dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    // using the correct catalog name, we expect msck to fix partitions
    conf.set(MetastoreConf.ConfVars.PARTITION_MANAGEMENT_CATALOG_NAME.getVarname(), catName);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(catName, dbName, tableName, (short) -1);
    assertEquals(5, partitions.size());
}
/**
 * Verifies PARTITION_MANAGEMENT_DATABASE_PATTERN filtering: a non-matching pattern leaves the
 * table untouched, a matching one repairs it. Also checks that the task skips a database that is
 * marked as a failover source (REPL_FAILOVER_ENDPOINT=SOURCE).
 */
@Test
public void testPartitionDiscoveryDBPattern() throws TException, IOException {
    String dbName = "db4";
    String tableName = "tbl4";
    Map<String, Column> colMap = buildAllColumns();
    List<String> partKeys = Lists.newArrayList("state", "dt");
    List<String> partKeyTypes = Lists.newArrayList("string", "date");
    // NOTE(review): "2018-11-31" is not a calendar-valid date — presumably intentional test data;
    // confirm createMetadata() tolerates it.
    List<List<String>> partVals = Lists.newArrayList(
        Lists.newArrayList("__HIVE_DEFAULT_PARTITION__", "1990-01-01"),
        Lists.newArrayList("CA", "1986-04-28"),
        Lists.newArrayList("MN", "2018-11-31"));
    createMetadata(DEFAULT_CATALOG_NAME, dbName, tableName, partKeys, partKeyTypes, partVals, colMap, false);
    Table table = client.getTable(dbName, tableName);
    List<Partition> partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    String tableLocation = table.getSd().getLocation();
    URI location = URI.create(tableLocation);
    Path tablePath = new Path(location);
    FileSystem fs = FileSystem.get(location, conf);
    Path newPart1 = new Path(tablePath, "state=WA/dt=2018-12-01");
    Path newPart2 = new Path(tablePath, "state=UT/dt=2018-12-02");
    fs.mkdirs(newPart1);
    fs.mkdirs(newPart2);
    assertEquals(5, fs.listStatus(tablePath).length);
    table.getParameters().put(PartitionManagementTask.DISCOVER_PARTITIONS_TBLPROPERTY, "true");
    client.alter_table(dbName, tableName, table);
    // no match for this db pattern, so we will see only 3 partitions
    conf.set(MetastoreConf.ConfVars.PARTITION_MANAGEMENT_DATABASE_PATTERN.getVarname(), "*dbfoo*");
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    // matching db pattern, we will see all 5 partitions now
    conf.set(MetastoreConf.ConfVars.PARTITION_MANAGEMENT_DATABASE_PATTERN.getVarname(), "*db4*");
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(5, partitions.size());
    fs.mkdirs(new Path(tablePath, "state=MG/dt=2021-28-05"));
    assertEquals(6, fs.listStatus(tablePath).length);
    Database db = client.getDatabase(table.getDbName());
    //PartitionManagementTask would not run for the database which is being failed over.
    db.putToParameters(ReplConst.REPL_FAILOVER_ENDPOINT, MetaStoreUtils.FailoverEndpoint.SOURCE.toString());
    client.alterDatabase(dbName, db);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(5, partitions.size());
}
/**
 * Verifies PARTITION_MANAGEMENT_TABLE_PATTERN filtering: partitions are only discovered for
 * tables whose name matches the configured pattern.
 */
@Test
public void testPartitionDiscoveryTablePattern() throws TException, IOException {
    String dbName = "db5";
    String tableName = "tbl5";
    Map<String, Column> colMap = buildAllColumns();
    List<String> partKeys = Lists.newArrayList("state", "dt");
    List<String> partKeyTypes = Lists.newArrayList("string", "date");
    List<List<String>> partVals = Lists.newArrayList(
        Lists.newArrayList("__HIVE_DEFAULT_PARTITION__", "1990-01-01"),
        Lists.newArrayList("CA", "1986-04-28"),
        Lists.newArrayList("MN", "2018-11-31"));
    createMetadata(DEFAULT_CATALOG_NAME, dbName, tableName, partKeys, partKeyTypes, partVals, colMap, false);
    Table table = client.getTable(dbName, tableName);
    List<Partition> partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    String tableLocation = table.getSd().getLocation();
    URI location = URI.create(tableLocation);
    Path tablePath = new Path(location);
    FileSystem fs = FileSystem.get(location, conf);
    Path newPart1 = new Path(tablePath, "state=WA/dt=2018-12-01");
    Path newPart2 = new Path(tablePath, "state=UT/dt=2018-12-02");
    fs.mkdirs(newPart1);
    fs.mkdirs(newPart2);
    assertEquals(5, fs.listStatus(tablePath).length);
    table.getParameters().put(PartitionManagementTask.DISCOVER_PARTITIONS_TBLPROPERTY, "true");
    client.alter_table(dbName, tableName, table);
    // no match for this table pattern, so we will see only 3 partitions
    conf.set(MetastoreConf.ConfVars.PARTITION_MANAGEMENT_TABLE_PATTERN.getVarname(), "*tblfoo*");
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    // matching table pattern, we will see all 5 partitions now
    conf.set(MetastoreConf.ConfVars.PARTITION_MANAGEMENT_TABLE_PATTERN.getVarname(), "tbl5*");
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(5, partitions.size());
}
/**
 * Verifies partition discovery on an insert-only transactional table, and that when several
 * PartitionManagementTask instances run concurrently against the same table only one performs
 * the work (the others are counted as skipped attempts).
 */
@Test
public void testPartitionDiscoveryTransactionalTable()
    throws TException, IOException, InterruptedException, ExecutionException {
    String dbName = "db6";
    String tableName = "tbl6";
    Map<String, Column> colMap = buildAllColumns();
    List<String> partKeys = Lists.newArrayList("state", "dt");
    List<String> partKeyTypes = Lists.newArrayList("string", "date");
    List<List<String>> partVals = Lists.newArrayList(
        Lists.newArrayList("__HIVE_DEFAULT_PARTITION__", "1990-01-01"),
        Lists.newArrayList("CA", "1986-04-28"),
        Lists.newArrayList("MN", "2018-11-31"));
    createMetadata(DEFAULT_CATALOG_NAME, dbName, tableName, partKeys, partKeyTypes, partVals, colMap, true);
    Table table = client.getTable(dbName, tableName);
    List<Partition> partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    String tableLocation = table.getSd().getLocation();
    URI location = URI.create(tableLocation);
    Path tablePath = new Path(location);
    FileSystem fs = FileSystem.get(location, conf);
    Path newPart1 = new Path(tablePath, "state=WA/dt=2018-12-01");
    Path newPart2 = new Path(tablePath, "state=UT/dt=2018-12-02");
    fs.mkdirs(newPart1);
    fs.mkdirs(newPart2);
    assertEquals(5, fs.listStatus(tablePath).length);
    // Mark the table as insert-only transactional so discovery takes the transactional code path.
    table.getParameters().put(PartitionManagementTask.DISCOVER_PARTITIONS_TBLPROPERTY, "true");
    table.getParameters().put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, "true");
    table.getParameters().put(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES,
        TransactionalValidationListener.INSERTONLY_TRANSACTIONAL_PROPERTY);
    client.alter_table(dbName, tableName, table);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(5, partitions.size());
    // only one partition discovery task is running, there will be no skipped attempts
    assertEquals(0, PartitionManagementTask.getSkippedAttempts());
    // delete a partition from fs, and submit 3 tasks at the same time each of them trying to acquire X lock on the
    // same table, only one of them will run other attempts will be skipped
    boolean deleted = fs.delete(newPart1.getParent(), true);
    assertTrue(deleted);
    assertEquals(4, fs.listStatus(tablePath).length);
    // 3 tasks are submitted at the same time, only one will eventually lock the table and only one get to run at a time
    // This is to simulate, skipping partition discovery task attempt when previous attempt is still incomplete
    PartitionManagementTask partitionDiscoveryTask1 = new PartitionManagementTask();
    partitionDiscoveryTask1.setConf(conf);
    PartitionManagementTask partitionDiscoveryTask2 = new PartitionManagementTask();
    partitionDiscoveryTask2.setConf(conf);
    PartitionManagementTask partitionDiscoveryTask3 = new PartitionManagementTask();
    partitionDiscoveryTask3.setConf(conf);
    List<PartitionManagementTask> tasks = Lists
        .newArrayList(partitionDiscoveryTask1, partitionDiscoveryTask2, partitionDiscoveryTask3);
    ExecutorService executorService = Executors.newFixedThreadPool(3);
    // Snapshot the global counters before, so assertions below are deltas and independent of
    // earlier tests in this class.
    int successBefore = PartitionManagementTask.getCompletedAttempts();
    int skippedBefore = PartitionManagementTask.getSkippedAttempts();
    List<Future<?>> futures = new ArrayList<>();
    for (PartitionManagementTask task : tasks) {
        futures.add(executorService.submit(task));
    }
    for (Future<?> future : futures) {
        future.get();
    }
    int successAfter = PartitionManagementTask.getCompletedAttempts();
    int skippedAfter = PartitionManagementTask.getSkippedAttempts();
    assertEquals(1, successAfter - successBefore);
    assertEquals(2, skippedAfter - skippedBefore);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(4, partitions.size());
}
/**
 * Verifies partition retention: with a 20s retention period all partitions are dropped after the
 * period elapses, except while the database is marked as a replication failover source — then
 * the task must leave partitions untouched.
 */
@Test
public void testPartitionRetention() throws TException, IOException, InterruptedException {
    String dbName = "db7";
    String tableName = "tbl7";
    Map<String, Column> colMap = buildAllColumns();
    List<String> partKeys = Lists.newArrayList("state", "dt");
    List<String> partKeyTypes = Lists.newArrayList("string", "date");
    List<List<String>> partVals = Lists.newArrayList(
        Lists.newArrayList("__HIVE_DEFAULT_PARTITION__", "1990-01-01"),
        Lists.newArrayList("CA", "1986-04-28"),
        Lists.newArrayList("MN", "2018-11-31"));
    createMetadata(DEFAULT_CATALOG_NAME, dbName, tableName, partKeys, partKeyTypes, partVals, colMap, false);
    Table table = client.getTable(dbName, tableName);
    List<Partition> partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    String tableLocation = table.getSd().getLocation();
    URI location = URI.create(tableLocation);
    Path tablePath = new Path(location);
    FileSystem fs = FileSystem.get(location, conf);
    Path newPart1 = new Path(tablePath, "state=WA/dt=2018-12-01");
    Path newPart2 = new Path(tablePath, "state=UT/dt=2018-12-02");
    fs.mkdirs(newPart1);
    fs.mkdirs(newPart2);
    assertEquals(5, fs.listStatus(tablePath).length);
    table.getParameters().put(PartitionManagementTask.DISCOVER_PARTITIONS_TBLPROPERTY, "true");
    table.getParameters().put(PartitionManagementTask.PARTITION_RETENTION_PERIOD_TBLPROPERTY, "20000ms");
    client.alter_table(dbName, tableName, table);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(5, partitions.size());
    Database db = client.getDatabase(table.getDbName());
    db.putToParameters(ReplConst.REPL_FAILOVER_ENDPOINT, MetaStoreUtils.FailoverEndpoint.SOURCE.toString());
    client.alterDatabase(table.getDbName(), db);
    // PartitionManagementTask would not do anything because the db is being failed over.
    Thread.sleep(30 * 1000);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(5, partitions.size());
    // Clear the failover marker; retention should now take effect.
    db.putToParameters(ReplConst.REPL_FAILOVER_ENDPOINT, "");
    client.alterDatabase(table.getDbName(), db);
    // after 30s all partitions should have been gone
    Thread.sleep(30 * 1000);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(0, partitions.size());
}
/**
 * Verifies that partition discovery skips a directory whose name is not a valid partition spec
 * ("state=UT/dt=" has an empty value) instead of failing: only the valid new directory becomes
 * a partition.
 */
@Test
public void testPartitionDiscoverySkipInvalidPath() throws TException, IOException, InterruptedException {
    String dbName = "db8";
    String tableName = "tbl8";
    Map<String, Column> colMap = buildAllColumns();
    List<String> partKeys = Lists.newArrayList("state", "dt");
    List<String> partKeyTypes = Lists.newArrayList("string", "date");
    List<List<String>> partVals = Lists.newArrayList(
        Lists.newArrayList("__HIVE_DEFAULT_PARTITION__", "1990-01-01"),
        Lists.newArrayList("CA", "1986-04-28"),
        Lists.newArrayList("MN", "2018-11-31"));
    createMetadata(DEFAULT_CATALOG_NAME, dbName, tableName, partKeys, partKeyTypes, partVals, colMap, false);
    Table table = client.getTable(dbName, tableName);
    List<Partition> partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    String tableLocation = table.getSd().getLocation();
    URI location = URI.create(tableLocation);
    Path tablePath = new Path(location);
    FileSystem fs = FileSystem.get(location, conf);
    Path newPart1 = new Path(tablePath, "state=WA/dt=2018-12-01");
    // Deliberately malformed partition directory (missing 'dt' value).
    Path newPart2 = new Path(tablePath, "state=UT/dt=");
    fs.mkdirs(newPart1);
    fs.mkdirs(newPart2);
    assertEquals(5, fs.listStatus(tablePath).length);
    table.getParameters().put(PartitionManagementTask.DISCOVER_PARTITIONS_TBLPROPERTY, "true");
    // empty retention period basically means disabled
    table.getParameters().put(PartitionManagementTask.PARTITION_RETENTION_PERIOD_TBLPROPERTY, "");
    client.alter_table(dbName, tableName, table);
    // there is one partition with invalid path which will get skipped
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(4, partitions.size());
}
/**
 * Verifies that partition discovery is a no-op for a database that is a replication target,
 * and resumes once the database sets repl.background.enable.
 */
@Test
public void testNoPartitionDiscoveryForReplTable() throws Exception {
    String dbName = "db_repl1";
    String tableName = "tbl_repl1";
    Map<String, Column> colMap = buildAllColumns();
    List<String> partKeys = Lists.newArrayList("state", "dt");
    List<String> partKeyTypes = Lists.newArrayList("string", "date");
    List<List<String>> partVals = Lists.newArrayList(
        Lists.newArrayList("__HIVE_DEFAULT_PARTITION__", "1990-01-01"),
        Lists.newArrayList("CA", "1986-04-28"),
        Lists.newArrayList("MN", "2018-11-31"));
    createMetadata(DEFAULT_CATALOG_NAME, dbName, tableName, partKeys, partKeyTypes, partVals, colMap, false);
    Table table = client.getTable(dbName, tableName);
    List<Partition> partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    String tableLocation = table.getSd().getLocation();
    URI location = URI.create(tableLocation);
    Path tablePath = new Path(location);
    FileSystem fs = FileSystem.get(location, conf);
    Path newPart1 = new Path(tablePath, "state=WA/dt=2018-12-01");
    Path newPart2 = new Path(tablePath, "state=UT/dt=2018-12-02");
    fs.mkdirs(newPart1);
    fs.mkdirs(newPart2);
    assertEquals(5, fs.listStatus(tablePath).length);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    // table property is set to true, but the table is marked as replication target. The new
    // partitions should not be created
    table.getParameters().put(PartitionManagementTask.DISCOVER_PARTITIONS_TBLPROPERTY, "true");
    Database db = client.getDatabase(table.getDbName());
    db.putToParameters(ReplConst.TARGET_OF_REPLICATION, "true");
    client.alterDatabase(table.getDbName(), db);
    client.alter_table(dbName, tableName, table);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    // change table type to external, delete a partition directory and make sure partition discovery works
    table.getParameters().put("EXTERNAL", "true");
    table.setTableType(TableType.EXTERNAL_TABLE.name());
    client.alter_table(dbName, tableName, table);
    // Delete location of one of the partitions. The partition discovery task should not drop
    // that partition.
    boolean deleted = fs.delete((new Path(URI.create(partitions.get(0).getSd().getLocation()))).getParent(),
        true);
    assertTrue(deleted);
    assertEquals(4, fs.listStatus(tablePath).length);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    //Check that partition Discovery works for database with repl.background.enable as true.
    db = client.getDatabase(table.getDbName());
    db.putToParameters(ReplConst.REPL_ENABLE_BACKGROUND_THREAD, ReplConst.TRUE);
    client.alterDatabase(table.getDbName(), db);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(4, partitions.size());
}
/**
 * Verifies that partition retention does not drop partitions of a replication-target database,
 * but does once repl.background.enable is set on that database.
 */
@Test
public void testNoPartitionRetentionForReplTarget() throws TException, InterruptedException {
    String dbName = "db_repl2";
    String tableName = "tbl_repl2";
    Map<String, Column> colMap = buildAllColumns();
    List<String> partKeys = Lists.newArrayList("state", "dt");
    List<String> partKeyTypes = Lists.newArrayList("string", "date");
    List<List<String>> partVals = Lists.newArrayList(
        Lists.newArrayList("__HIVE_DEFAULT_PARTITION__", "1990-01-01"),
        Lists.newArrayList("CA", "1986-04-28"),
        Lists.newArrayList("MN", "2018-11-31"));
    // Check for the existence of partitions 10 seconds after the partition retention period has
    // elapsed. Gives enough time for the partition retention task to work.
    long partitionRetentionPeriodMs = 20000;
    long waitingPeriodForTest = partitionRetentionPeriodMs + 10 * 1000;
    createMetadata(DEFAULT_CATALOG_NAME, dbName, tableName, partKeys, partKeyTypes, partVals, colMap, false);
    Table table = client.getTable(dbName, tableName);
    List<Partition> partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    table.getParameters().put(PartitionManagementTask.DISCOVER_PARTITIONS_TBLPROPERTY, "true");
    table.getParameters().put(PartitionManagementTask.PARTITION_RETENTION_PERIOD_TBLPROPERTY,
        partitionRetentionPeriodMs + "ms");
    client.alter_table(dbName, tableName, table);
    Database db = client.getDatabase(table.getDbName());
    db.putToParameters(ReplConst.TARGET_OF_REPLICATION, "true");
    client.alterDatabase(table.getDbName(), db);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    // after 30s all partitions should remain in-tact for a table which is target of replication.
    Thread.sleep(waitingPeriodForTest);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    //Check that partition retention works for database with repl.background.enable as true.
    db = client.getDatabase(table.getDbName());
    db.putToParameters(ReplConst.REPL_ENABLE_BACKGROUND_THREAD, ReplConst.TRUE);
    client.alterDatabase(table.getDbName(), db);
    Thread.sleep(waitingPeriodForTest);
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(0, partitions.size());
}
/**
 * Verifies that the partition management task drops a metastore partition whose backing
 * directory was removed, for an external table partitioned by string/date/timestamp keys
 * (exercising the partition expression filter over all three key types).
 */
@Test
public void testPartitionExprFilter() throws TException, IOException {
    String dbName = "db10";
    String tableName = "tbl10";
    Map<String, Column> colMap = buildAllColumns();
    List<String> partKeys = Lists.newArrayList("state", "dt", "modts");
    List<String> partKeyTypes = Lists.newArrayList("string", "date", "timestamp");
    List<List<String>> partVals = Lists.newArrayList(
        Lists.newArrayList("__HIVE_DEFAULT_PARTITION__", "1990-01-01", "__HIVE_DEFAULT_PARTITION__"),
        Lists.newArrayList("CA", "1986-04-28", "2020-02-21 08:30:01"),
        Lists.newArrayList("MN", "2018-11-31", "2020-02-21 08:19:01"));
    createMetadata(DEFAULT_CATALOG_NAME, dbName, tableName, partKeys, partKeyTypes, partVals, colMap, false);
    Table table = client.getTable(dbName, tableName);
    // External table + discovery enabled so the task both adds and drops partitions.
    table.getParameters().put(PartitionManagementTask.DISCOVER_PARTITIONS_TBLPROPERTY, "true");
    table.getParameters().put("EXTERNAL", "true");
    table.setTableType(TableType.EXTERNAL_TABLE.name());
    client.alter_table(dbName, tableName, table);
    List<Partition> partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(3, partitions.size());
    String tableLocation = table.getSd().getLocation();
    URI location = URI.create(tableLocation);
    Path tablePath = new Path(location);
    FileSystem fs = FileSystem.get(location, conf);
    String partPath = partitions.get(1).getSd().getLocation();
    Path newPart1 = new Path(tablePath, partPath);
    // Recursive delete via the non-deprecated delete(Path, boolean), and assert it succeeded so a
    // later partition-count mismatch points at discovery rather than at filesystem state
    // (consistent with the other tests in this class).
    assertTrue(fs.delete(newPart1, true));
    conf.set(MetastoreConf.ConfVars.PARTITION_MANAGEMENT_DATABASE_PATTERN.getVarname(), "*db10*");
    conf.set(ConfVars.PARTITION_MANAGEMENT_TABLE_TYPES.getVarname(), TableType.EXTERNAL_TABLE.name());
    runPartitionManagementTask(conf);
    partitions = client.listPartitions(dbName, tableName, (short) -1);
    assertEquals(2, partitions.size());
}
/**
 * Runs one synchronous pass of the partition management task using the given configuration.
 *
 * @param conf Metastore configuration the task should pick its settings from.
 */
private void runPartitionManagementTask(Configuration conf) {
    final PartitionManagementTask pmTask = new PartitionManagementTask();
    pmTask.setConf(conf);
    pmTask.run();
}
/**
 * Immutable name/type pair describing a table column in the test schemas.
 * Fields are final: instances are write-once value holders.
 */
private static class Column {
    private final String colName;
    private final String colType;

    public Column(final String colName, final String colType) {
        this.colName = colName;
        this.colType = colType;
    }
}
}
|
apache/ignite | 38,137 | modules/core/src/main/java/org/apache/ignite/internal/processors/affinity/GridAffinityAssignmentCache.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.affinity;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentNavigableMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.IgniteSystemProperties;
import org.apache.ignite.cache.affinity.AffinityCentralizedFunction;
import org.apache.ignite.cache.affinity.AffinityFunction;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.events.DiscoveryEvent;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.cluster.NodeOrderComparator;
import org.apache.ignite.internal.managers.discovery.DiscoCache;
import org.apache.ignite.internal.processors.cache.ExchangeDiscoveryEvents;
import org.apache.ignite.internal.processors.cache.GridCachePartitionExchangeManager;
import org.apache.ignite.internal.processors.cluster.BaselineTopology;
import org.apache.ignite.internal.util.future.GridFutureAdapter;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.SB;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgnitePredicate;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_AFFINITY_HISTORY_SIZE;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD;
import static org.apache.ignite.IgniteSystemProperties.getFloat;
import static org.apache.ignite.IgniteSystemProperties.getInteger;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.internal.events.DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT;
/**
* Affinity cached function.
*/
public class GridAffinityAssignmentCache {
/** @see IgniteSystemProperties#IGNITE_AFFINITY_HISTORY_SIZE */
public static final int DFLT_AFFINITY_HISTORY_SIZE = 25;
/** @see IgniteSystemProperties#IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD */
public static final float DFLT_PART_DISTRIBUTION_WARN_THRESHOLD = 50f;
/**
 * Affinity cache will shrink when total number of non-shallow (see {@link HistoryAffinityAssignmentImpl})
 * historical instances will be greater than value of this constant.
 */
final int maxNonShallowHistSize = getInteger(IGNITE_AFFINITY_HISTORY_SIZE, DFLT_AFFINITY_HISTORY_SIZE);
/**
 * Affinity cache will also shrink when total number of both shallow ({@link HistoryAffinityAssignmentShallowCopy})
 * and non-shallow (see {@link HistoryAffinityAssignmentImpl}) historical instances will be greater than
 * value of this constant.
 */
final int maxTotalHistSize = maxNonShallowHistSize * 10;
/**
 * Independent of {@link #maxNonShallowHistSize} and {@link #maxTotalHistSize}, affinity cache will always
 * keep this number of non-shallow (see {@link HistoryAffinityAssignmentImpl}) instances.
 * We need at least one real instance, otherwise we won't be able to get affinity cache for
 * {@link GridCachePartitionExchangeManager#lastAffinityChangedTopologyVersion} in case cluster has experienced
 * too many client joins / client leaves / local cache starts.
 */
private static final int MIN_NON_SHALLOW_HIST_SIZE = 2;
/** Partition distribution warning threshold (percent), read from system properties. */
private final float partDistribution =
    getFloat(IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD, DFLT_PART_DISTRIBUTION_WARN_THRESHOLD);
/** Group name if specified or cache name. */
private final String cacheOrGrpName;
/** Group ID. */
private final int grpId;
/** Number of backups. */
private final int backups;
/** Affinity function. */
private final AffinityFunction aff;
/** Node filter of the cache group; nodes rejected by it do not participate in affinity. */
private final IgnitePredicate<ClusterNode> nodeFilter;
/** Partitions count. */
private final int partsCnt;
/** Affinity calculation results cache: topology version => partition => nodes. */
private final ConcurrentNavigableMap<AffinityTopologyVersion, HistoryAffinityAssignment> affCache;
/** Latest calculated ideal affinity assignment; {@code null} until first calculation or after reconnect. */
private volatile IdealAffinityAssignment idealAssignment;
/** Ideal assignment calculated against the current baseline topology, if a baseline is set. */
private volatile IdealAffinityAssignment baselineAssignment;
/** Baseline topology for which {@link #baselineAssignment} was calculated. */
private BaselineTopology baselineTopology;
/** Cache item corresponding to the head topology version. */
private final AtomicReference<GridAffinityAssignmentV2> head;
/** Ready futures. */
private final ConcurrentMap<AffinityTopologyVersion, AffinityReadyFuture> readyFuts = new ConcurrentSkipListMap<>();
/** Log. */
private final IgniteLogger log;
/** Kernal context. */
private final GridKernalContext ctx;
/** Node stop flag. */
private volatile IgniteCheckedException stopErr;
/** Number of non-shallow (see {@link HistoryAffinityAssignmentImpl}) affinity cache instances. */
private volatile int nonShallowHistSize;
/** Key used to look up caches with a similar affinity configuration (see {@link #similarAffinityKey()}). */
private final Object similarAffKey;
/**
 * Constructs affinity cached calculations. Private: instances are obtained through
 * {@link #create(GridKernalContext, AffinityFunction, CacheConfiguration)}.
 *
 * @param ctx Kernal context.
 * @param cacheOrGrpName Cache or cache group name.
 * @param grpId Group ID (must be non-zero).
 * @param aff Affinity function.
 * @param nodeFilter Node filter.
 * @param backups Number of backups.
 */
private GridAffinityAssignmentCache(GridKernalContext ctx,
    String cacheOrGrpName,
    int grpId,
    AffinityFunction aff,
    IgnitePredicate<ClusterNode> nodeFilter,
    int backups
) {
    assert ctx != null;
    assert aff != null;
    assert nodeFilter != null;
    assert grpId != 0;
    this.ctx = ctx;
    this.aff = aff;
    this.nodeFilter = nodeFilter;
    this.cacheOrGrpName = cacheOrGrpName;
    this.grpId = grpId;
    this.backups = backups;
    log = ctx.log(GridAffinityAssignmentCache.class);
    partsCnt = aff.partitions();
    affCache = new ConcurrentSkipListMap<>();
    // Head starts at version NONE until initialize() installs a real assignment.
    head = new AtomicReference<>(new GridAffinityAssignmentV2(AffinityTopologyVersion.NONE));
    similarAffKey = ctx.affinity().similaryAffinityKey(aff, nodeFilter, backups, partsCnt);
    assert similarAffKey != null;
}
/**
 * Factory method: builds an affinity assignment cache from a cache configuration, deriving
 * group name/ID, node filter and backups count from it.
 *
 * @param ctx Kernal context.
 * @param aff Initialized affinity function.
 * @param ccfg Cache configuration.
 * @return Affinity assignment cache instance.
 */
public static GridAffinityAssignmentCache create(GridKernalContext ctx, AffinityFunction aff, CacheConfiguration<?, ?> ccfg) {
    return new GridAffinityAssignmentCache(ctx,
        CU.cacheOrGroupName(ccfg),
        CU.cacheGroupId(ccfg),
        aff,
        ccfg.getNodeFilter(),
        ccfg.getBackups());
}
/**
 * @return Key to find caches with similar affinity (same function, node filter, backups and
 *      partition count); computed once in the constructor, never {@code null}.
 */
public Object similarAffinityKey() {
    return similarAffKey;
}
/**
 * @return Group name if it is specified, otherwise cache name.
 */
public String cacheOrGroupName() {
    return cacheOrGrpName;
}
/**
 * @return Cache group ID.
 */
public int groupId() {
    return grpId;
}
/**
 * Initializes affinity with given topology version and assignment. Installs the assignment into
 * the history cache and the head reference, then completes every pending ready future whose
 * awaited version is at or before {@code topVer}.
 *
 * @param topVer Topology version (must not be older than {@code lastVersion()}).
 * @param affAssignment Affinity assignment for topology version.
 */
public void initialize(AffinityTopologyVersion topVer, List<List<ClusterNode>> affAssignment) {
    assert topVer.compareTo(lastVersion()) >= 0 : "[topVer = " + topVer + ", last=" + lastVersion() + ']';
    assert idealAssignment != null;
    GridAffinityAssignmentV2 assignment = new GridAffinityAssignmentV2(topVer, affAssignment, idealAssignment.assignment());
    HistoryAffinityAssignmentImpl newHistEntry = new HistoryAffinityAssignmentImpl(assignment, backups);
    // Keep the previously cached entry (if any) for the same version: onHistoryAdded() below
    // uses it when updating the history size accounting.
    HistoryAffinityAssignment existing = affCache.put(topVer, newHistEntry);
    head.set(assignment);
    // Wake up all waiters whose awaited topology version is now reached.
    for (Map.Entry<AffinityTopologyVersion, AffinityReadyFuture> entry : readyFuts.entrySet()) {
        if (entry.getKey().compareTo(topVer) <= 0) {
            if (log.isDebugEnabled())
                log.debug("Completing topology ready future (initialized affinity) " +
                    "[locNodeId=" + ctx.localNodeId() + ", futVer=" + entry.getKey() + ", topVer=" + topVer + ']');
            entry.getValue().onDone(topVer);
        }
    }
    onHistoryAdded(existing, newHistEntry);
    if (log.isTraceEnabled()) {
        log.trace("New affinity assignment [grp=" + cacheOrGrpName
            + ", topVer=" + topVer
            + ", aff=" + fold(affAssignment) + "]");
    }
}
/**
 * Stores a freshly calculated ideal assignment.
 *
 * @param topVer Topology version the assignment was calculated for.
 * @param assignment Assignment (partition => nodes).
 */
public void idealAssignment(AffinityTopologyVersion topVer, List<List<ClusterNode>> assignment) {
    this.idealAssignment = IdealAffinityAssignment.create(topVer, assignment);
}
/**
 * @return Raw per-partition node lists of the last calculated ideal assignment, or {@code null}
 *      if it has not been calculated yet (or was reset, e.g. by {@link #onReconnected()}).
 */
@Nullable public List<List<ClusterNode>> idealAssignmentRaw() {
    // Read the volatile field exactly once: the original null-check + dereference read it twice,
    // which could NPE if another thread reset the field in between.
    IdealAffinityAssignment cur = idealAssignment;

    return cur != null ? cur.assignment() : null;
}
/**
 * @return Last calculated ideal assignment, or {@code null} if none has been calculated yet.
 */
@Nullable public IdealAffinityAssignment idealAssignment() {
    return idealAssignment;
}
/**
 * @return {@code True} if affinity function has {@link AffinityCentralizedFunction} annotation.
 */
public boolean centralizedAffinityFunction() {
    return U.hasAnnotation(aff, AffinityCentralizedFunction.class);
}
/**
 * Kernal stop callback: records the stop error and fails all pending affinity-ready futures
 * with it.
 *
 * @param err Error.
 */
public void cancelFutures(IgniteCheckedException err) {
    stopErr = err;

    readyFuts.values().forEach(fut -> fut.onDone(err));
}
/**
 * Client reconnect callback: drops all cached state (ideal assignment, history, head) and clears
 * the stop error so the cache can be repopulated on the new cluster.
 */
public void onReconnected() {
    idealAssignment = null;
    affCache.clear();
    nonShallowHistSize = 0;
    // Reset head to the sentinel NONE version, same as in the constructor.
    head.set(new GridAffinityAssignmentV2(AffinityTopologyVersion.NONE));
    stopErr = null;
}
/**
 * Calculates ideal assignment for given topology version and events happened since last calculation.
 *
 * @param topVer Topology version to calculate affinity cache for.
 * @param events Discovery events that caused this topology version change.
 * @param discoCache Discovery cache.
 * @return Ideal affinity assignment.
 */
public IdealAffinityAssignment calculate(
    AffinityTopologyVersion topVer,
    @Nullable ExchangeDiscoveryEvents events,
    @Nullable DiscoCache discoCache
) {
    if (log.isDebugEnabled())
        log.debug("Calculating ideal affinity [topVer=" + topVer + ", locNodeId=" + ctx.localNodeId() +
            ", discoEvts=" + events + ']');
    IdealAffinityAssignment prevAssignment = idealAssignment;
    // Already calculated.
    if (prevAssignment != null && prevAssignment.topologyVersion().equals(topVer))
        return prevAssignment;
    // Resolve nodes snapshot for specified topology version.
    // NOTE(review): discoCache is dereferenced here although the parameter is declared
    // @Nullable and is null-checked a few lines below -- confirm callers never pass null,
    // otherwise this line throws NPE before the check is ever reached.
    List<ClusterNode> sorted = new ArrayList<>(discoCache.cacheGroupAffinityNodes(groupId()));
    sorted.sort(NodeOrderComparator.getInstance());
    boolean hasBaseline = false;
    boolean changedBaseline = false;
    BaselineTopology blt = null;
    if (discoCache != null) {
        blt = discoCache.state().baselineTopology();
        hasBaseline = blt != null;
        // Baseline "changed" also when it appeared or disappeared since the last calculation.
        changedBaseline = !hasBaseline ? baselineTopology != null : !blt.equals(baselineTopology);
    }
    IdealAffinityAssignment assignment;
    if (prevAssignment != null && events != null) {
        /* Skip affinity calculation only when all nodes triggered exchange
           don't belong to affinity for current group (client node or filtered by nodeFilter). */
        boolean skipCalculation = true;
        for (DiscoveryEvent evt : events.events()) {
            boolean affNode = CU.affinityNode(evt.eventNode(), nodeFilter);
            if (affNode || evt.type() == EVT_DISCOVERY_CUSTOM_EVT) {
                skipCalculation = false;
                break;
            }
        }
        if (hasBaseline && changedBaseline) {
            // Baseline changed: recompute the baseline assignment, then drop offline nodes.
            recalculateBaselineAssignment(topVer, events, prevAssignment, sorted, blt);
            assignment = IdealAffinityAssignment.create(
                topVer,
                sorted,
                baselineAssignmentWithoutOfflineNodes(discoCache)
            );
        }
        else if (skipCalculation)
            // Nothing affinity-relevant happened: reuse the previous assignment as-is.
            assignment = prevAssignment;
        else if (hasBaseline) {
            // Baseline unchanged; compute it lazily on first use.
            if (baselineAssignment == null)
                recalculateBaselineAssignment(topVer, events, prevAssignment, sorted, blt);
            assignment = IdealAffinityAssignment.create(
                topVer,
                sorted,
                baselineAssignmentWithoutOfflineNodes(discoCache)
            );
        }
        else {
            // No baseline: ask the affinity function directly, passing the previous
            // assignment so it can minimize partition migration.
            List<List<ClusterNode>> calculated = aff.assignPartitions(new GridAffinityFunctionContextImpl(
                sorted,
                prevAssignment.assignment(),
                events.lastEvent(),
                topVer,
                backups
            ));
            assignment = IdealAffinityAssignment.create(topVer, sorted, calculated);
        }
    }
    else {
        // First calculation (or no events context available).
        if (hasBaseline) {
            recalculateBaselineAssignment(topVer, events, prevAssignment, sorted, blt);
            assignment = IdealAffinityAssignment.createWithPreservedPrimaries(
                topVer,
                baselineAssignmentWithoutOfflineNodes(discoCache),
                baselineAssignment
            );
        }
        else {
            List<List<ClusterNode>> calculated = aff.assignPartitions(new GridAffinityFunctionContextImpl(sorted,
                prevAssignment != null ? prevAssignment.assignment() : null,
                events != null ? events.lastEvent() : null,
                topVer,
                backups
            ));
            assignment = IdealAffinityAssignment.create(topVer, sorted, calculated);
        }
    }
    assert assignment != null;
    idealAssignment = assignment;
    if (ctx.cache().cacheMode(cacheOrGrpName) == PARTITIONED && !ctx.clientNode())
        printDistributionIfThresholdExceeded(assignment.assignment(), sorted.size());
    // Keep the cached baseline state consistent with what was just used.
    if (hasBaseline) {
        baselineTopology = blt;
        assert baselineAssignment != null;
    }
    else {
        baselineTopology = null;
        baselineAssignment = null;
    }
    return assignment;
}
/**
 * Recalculates affinity over the baseline view of the topology and caches the result
 * in {@code baselineAssignment}.
 *
 * @param topVer Topology version.
 * @param events Events.
 * @param prevAssignment Previous assignment.
 * @param sorted Sorted cache group nodes.
 * @param blt Baseline topology.
 */
private void recalculateBaselineAssignment(
    AffinityTopologyVersion topVer,
    ExchangeDiscoveryEvents events,
    IdealAffinityAssignment prevAssignment,
    List<ClusterNode> sorted,
    BaselineTopology blt
) {
    // Affinity is computed over the baseline view, not over the raw sorted node list;
    // presumably the view also represents baseline nodes that are currently offline -- confirm
    // against BaselineTopology.createBaselineView.
    List<ClusterNode> baselineAffNodes = blt.createBaselineView(sorted, nodeFilter);
    List<List<ClusterNode>> calculated = aff.assignPartitions(new GridAffinityFunctionContextImpl(
        baselineAffNodes,
        prevAssignment != null ? prevAssignment.assignment() : null,
        events != null ? events.lastEvent() : null,
        topVer,
        backups
    ));
    baselineAssignment = IdealAffinityAssignment.create(topVer, baselineAffNodes, calculated);
}
/**
 * Filters offline nodes out of the cached baseline assignment.
 *
 * @param disco Discovery cache used to resolve currently alive server nodes.
 * @return Per-partition assignment containing only alive nodes; partitions whose
 *      baseline owners are all offline get an empty list.
 */
private List<List<ClusterNode>> baselineAssignmentWithoutOfflineNodes(DiscoCache disco) {
    // Index alive server nodes by consistent ID for O(1) lookups.
    Map<Object, ClusterNode> aliveByConsistentId = new HashMap<>();

    for (ClusterNode srv : disco.serverNodes())
        aliveByConsistentId.put(srv.consistentId(), srv);

    List<List<ClusterNode>> baseline = baselineAssignment.assignment();
    List<List<ClusterNode>> filtered = new ArrayList<>(baseline.size());

    for (List<ClusterNode> owners : baseline) {
        List<ClusterNode> aliveOwners = new ArrayList<>(owners.size());

        for (ClusterNode owner : owners) {
            // Replace the baseline node with its alive counterpart (same consistent ID).
            ClusterNode alive = aliveByConsistentId.get(owner.consistentId());

            if (alive != null)
                aliveOwners.add(alive);
        }

        filtered.add(aliveOwners.isEmpty() ? Collections.<ClusterNode>emptyList() : aliveOwners);
    }

    return filtered;
}
/**
 * Calculates and logs partitions distribution if threshold of uneven distribution {@link #partDistribution} is exceeded.
 *
 * @param assignments Assignments to calculate partitions distribution.
 * @param nodes Affinity nodes number.
 * @see IgniteSystemProperties#IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD
 */
private void printDistributionIfThresholdExceeded(List<List<ClusterNode>> assignments, int nodes) {
    // Fix: guard against division by zero below. With no affinity nodes expCnt becomes
    // Infinity and deltaPrimary evaluates to 100%, producing a bogus warning.
    if (nodes <= 0)
        return;
    int locPrimaryCnt = 0;
    int locBackupCnt = 0;
    // Count partitions owned by the local node: index 0 is the primary, the rest are backups.
    for (List<ClusterNode> assignment : assignments) {
        for (int i = 0; i < assignment.size(); i++) {
            ClusterNode node = assignment.get(i);
            if (node.isLocal()) {
                if (i == 0)
                    locPrimaryCnt++;
                else
                    locBackupCnt++;
            }
        }
    }
    // Expected per-node primary count under a perfectly even distribution.
    float expCnt = (float)partsCnt / nodes;
    // Relative deviation from the ideal, in percent.
    float deltaPrimary = Math.abs(1 - (float)locPrimaryCnt / expCnt) * 100;
    float deltaBackup = Math.abs(1 - (float)locBackupCnt / (expCnt * backups)) * 100;
    if ((deltaPrimary > partDistribution || deltaBackup > partDistribution) && log.isInfoEnabled()) {
        log.info(String.format("Local node affinity assignment distribution is not ideal " +
            "[cache=%s, expectedPrimary=%.2f, actualPrimary=%d, " +
            "expectedBackups=%.2f, actualBackups=%d, warningThreshold=%.2f%%]",
            cacheOrGrpName, expCnt, locPrimaryCnt,
            expCnt * backups, locBackupCnt, partDistribution));
    }
}
/**
 * Copies previous affinity assignment when discovery event does not cause affinity assignment changes
 * (e.g. client node joins or leaves).
 *
 * @param evt Event.
 * @param topVer Topology version.
 */
public void clientEventTopologyChange(DiscoveryEvent evt, AffinityTopologyVersion topVer) {
    assert topVer.compareTo(lastVersion()) >= 0 : "[topVer = " + topVer + ", last=" + lastVersion() + ']';
    GridAffinityAssignmentV2 aff = head.get();
    // A non-custom event here must come from a node that owns no partitions, otherwise
    // affinity would actually change and this copy shortcut would be invalid.
    assert evt.type() == EVT_DISCOVERY_CUSTOM_EVT || aff.primaryPartitions(evt.eventNode().id()).isEmpty() : evt;
    assert evt.type() == EVT_DISCOVERY_CUSTOM_EVT || aff.backupPartitions(evt.eventNode().id()).isEmpty() : evt;
    // Re-stamp the unchanged assignment with the new topology version.
    GridAffinityAssignmentV2 assignmentCpy = new GridAffinityAssignmentV2(topVer, aff);
    // Predecessor version: previous minor version, or the "last minor" of the previous
    // major version when the minor counter is at zero.
    AffinityTopologyVersion prevVer = topVer.minorTopologyVersion() == 0 ?
        new AffinityTopologyVersion(topVer.topologyVersion() - 1, Integer.MAX_VALUE) :
        new AffinityTopologyVersion(topVer.topologyVersion(), topVer.minorTopologyVersion() - 1);
    Map.Entry<AffinityTopologyVersion, HistoryAffinityAssignment> prevHistEntry = affCache.floorEntry(prevVer);
    // Store a shallow copy referencing the predecessor's assignment when one exists,
    // to avoid duplicating identical history entries.
    HistoryAffinityAssignment newHistEntry = (prevHistEntry == null) ?
        new HistoryAffinityAssignmentImpl(assignmentCpy, backups) :
        new HistoryAffinityAssignmentShallowCopy(prevHistEntry.getValue().origin(), topVer);
    HistoryAffinityAssignment existing = affCache.put(topVer, newHistEntry);
    head.set(assignmentCpy);
    // Complete all waiters registered for this or any earlier version.
    for (Map.Entry<AffinityTopologyVersion, AffinityReadyFuture> entry : readyFuts.entrySet()) {
        if (entry.getKey().compareTo(topVer) <= 0) {
            if (log.isDebugEnabled())
                log.debug("Completing topology ready future (use previous affinity) " +
                    "[locNodeId=" + ctx.localNodeId() + ", futVer=" + entry.getKey() + ", topVer=" + topVer + ']');
            entry.getValue().onDone(topVer);
        }
    }
    // Let the history bookkeeping run its size-based cleanup.
    onHistoryAdded(existing, newHistEntry);
}
/**
 * @return Last initialized affinity version.
 */
public AffinityTopologyVersion lastVersion() {
    GridAffinityAssignmentV2 cur = head.get();

    return cur.topologyVersion();
}

/**
 * @return Last initialized affinity assignment.
 */
public AffinityAssignment lastReadyAffinity() {
    return head.get();
}

/**
 * @param topVer Topology version.
 * @return Affinity assignment for the given version.
 */
public List<List<ClusterNode>> assignments(AffinityTopologyVersion topVer) {
    return cachedAffinity(topVer).assignment();
}

/**
 * Same as {@link #assignments(AffinityTopologyVersion)} but requires affinity for the
 * given version to be already initialized.
 *
 * @param topVer Topology version.
 * @return Affinity assignment.
 */
public List<List<ClusterNode>> readyAssignments(AffinityTopologyVersion topVer) {
    AffinityAssignment ready = readyAffinity(topVer);

    assert ready != null : "No ready affinity [grp=" + cacheOrGrpName + ", ver=" + topVer + ']';

    return ready.assignment();
}
/**
 * Gets future that will be completed after topology with version {@code topVer} is calculated.
 *
 * @param topVer Topology version to await for.
 * @return Future that will be completed after affinity for topology version {@code topVer} is calculated,
 *      or {@code null} if the requested version is already initialized.
 */
@Nullable public IgniteInternalFuture<AffinityTopologyVersion> readyFuture(AffinityTopologyVersion topVer) {
    GridAffinityAssignmentV2 aff = head.get();
    // Fast path: the requested version is already at or behind the head.
    if (aff.topologyVersion().compareTo(topVer) >= 0) {
        if (log.isDebugEnabled())
            log.debug("Returning finished future for readyFuture [head=" + aff.topologyVersion() +
                ", topVer=" + topVer + ']');
        return null;
    }
    GridFutureAdapter<AffinityTopologyVersion> fut = F.addIfAbsent(readyFuts, topVer,
        new AffinityReadyFuture(topVer));
    // Double-check: the head may have advanced concurrently between the first check and the
    // future registration; complete the future here to avoid missing the notification.
    aff = head.get();
    if (aff.topologyVersion().compareTo(topVer) >= 0) {
        if (log.isDebugEnabled())
            log.debug("Completing topology ready future right away [head=" + aff.topologyVersion() +
                ", topVer=" + topVer + ']');
        fut.onDone(aff.topologyVersion());
    }
    else if (stopErr != null)
        // The cache was stopped: fail the future instead of letting the caller hang.
        fut.onDone(stopErr);
    return fut;
}
/**
 * @return Partition count.
 */
public int partitions() {
    return partsCnt;
}

/**
 * Gets affinity nodes for specified partition.
 *
 * @param part Partition.
 * @param topVer Topology version.
 * @return Affinity nodes.
 */
public List<ClusterNode> nodes(int part, AffinityTopologyVersion topVer) {
    // Resolve cached affinity nodes.
    AffinityAssignment cached = cachedAffinity(topVer);

    return cached.get(part);
}

/**
 * @param topVer Topology version.
 * @return Partitions whose primary differs from the ideal assignment.
 */
public Set<Integer> partitionPrimariesDifferentToIdeal(AffinityTopologyVersion topVer) {
    AffinityAssignment cached = cachedAffinity(topVer);

    return cached.partitionPrimariesDifferentToIdeal();
}

/**
 * Get primary partitions for specified node ID.
 *
 * @param nodeId Node ID to get primary partitions for.
 * @param topVer Topology version.
 * @return Primary partitions for specified node ID.
 */
public Set<Integer> primaryPartitions(UUID nodeId, AffinityTopologyVersion topVer) {
    AffinityAssignment cached = cachedAffinity(topVer);

    return cached.primaryPartitions(nodeId);
}

/**
 * Get backup partitions for specified node ID.
 *
 * @param nodeId Node ID to get backup partitions for.
 * @param topVer Topology version.
 * @return Backup partitions for specified node ID.
 */
public Set<Integer> backupPartitions(UUID nodeId, AffinityTopologyVersion topVer) {
    AffinityAssignment cached = cachedAffinity(topVer);

    return cached.backupPartitions(nodeId);
}
/**
 * Dumps debug information about pending affinity ready futures (at most three of them).
 *
 * @return {@code True} if there are pending futures.
 */
public boolean dumpDebugInfo() {
    if (readyFuts.isEmpty())
        return false;

    U.warn(log, "First 3 pending affinity ready futures [grp=" + cacheOrGrpName +
        ", total=" + readyFuts.size() +
        ", lastVer=" + lastVersion() + "]:");

    int dumped = 0;

    for (AffinityReadyFuture fut : readyFuts.values()) {
        U.warn(log, ">>> " + fut);

        if (++dumped == 3)
            break;
    }

    return true;
}
/**
 * @param topVer Topology version.
 * @return Assignment.
 * @throws IllegalStateException If affinity assignment is not initialized for the given topology version.
 */
public AffinityAssignment readyAffinity(AffinityTopologyVersion topVer) {
    // Fast path: the requested version is the latest one.
    AffinityAssignment cur = head.get();

    if (cur.topologyVersion().equals(topVer))
        return cur;

    // Fall back to the history cache.
    AffinityAssignment hist = affCache.get(topVer);

    if (hist != null)
        return hist;

    throw new IllegalStateException("Affinity for topology version is " +
        "not initialized [locNode=" + ctx.discovery().localNode().id() +
        ", grp=" + cacheOrGrpName +
        ", topVer=" + topVer +
        ", head=" + head.get().topologyVersion() +
        ", history=" + affCache.keySet() +
        ", maxNonShallowHistorySize=" + maxNonShallowHistSize +
        ']');
}
/**
 * Get cached affinity for specified topology version.
 *
 * @param topVer Topology version.
 * @return Cached affinity.
 * @throws IllegalArgumentException in case of the specified topology version {@code topVer}
 *      is earlier than affinity is calculated
 *      or the history of assignments is already cleaned.
 */
public AffinityAssignment cachedAffinity(AffinityTopologyVersion topVer) {
    // Affinity may not have changed at topVer; resolve the last version it actually changed at.
    AffinityTopologyVersion lastAffChange =
        ctx.cache().context().exchange().lastAffinityChangedTopologyVersion(topVer);

    return cachedAffinity(topVer, lastAffChange);
}
/**
 * Get cached affinity for specified topology version.
 *
 * @param topVer Topology version for which affinity assignment is requested.
 * @param lastAffChangeTopVer Topology version of last affinity assignment change.
 * @return Cached affinity.
 * @throws IllegalArgumentException in case of the specified topology version {@code topVer}
 *      is earlier than affinity is calculated
 *      or the history of assignments is already cleaned.
 */
public AffinityAssignment cachedAffinity(
    AffinityTopologyVersion topVer,
    AffinityTopologyVersion lastAffChangeTopVer
) {
    // NONE means "latest available": resolve both versions to the last initialized one.
    if (topVer.equals(AffinityTopologyVersion.NONE))
        topVer = lastAffChangeTopVer = lastVersion();
    else {
        if (lastAffChangeTopVer.equals(AffinityTopologyVersion.NONE))
            lastAffChangeTopVer = topVer;
        // Block until affinity for the change version is initialized.
        awaitTopologyVersion(lastAffChangeTopVer);
    }
    assert topVer.topologyVersion() >= 0 : topVer;
    AffinityAssignment cache = head.get();
    // Any assignment with version in [lastAffChangeTopVer, topVer] is acceptable, since
    // affinity did not change between those versions.
    if (!(cache.topologyVersion().compareTo(lastAffChangeTopVer) >= 0 &&
        cache.topologyVersion().compareTo(topVer) <= 0)) {
        Map.Entry<AffinityTopologyVersion, HistoryAffinityAssignment> e = affCache.ceilingEntry(lastAffChangeTopVer);
        if (e != null)
            cache = e.getValue();
        // NOTE(review): when e == null, cache still holds the (non-null) head value, so this
        // branch looks unreachable unless head.get() can return null -- confirm intent.
        if (cache == null) {
            throw new IllegalStateException("Getting affinity for topology version earlier than affinity is " +
                "calculated [locNode=" + ctx.discovery().localNode() +
                ", grp=" + cacheOrGrpName +
                ", topVer=" + topVer +
                ", lastAffChangeTopVer=" + lastAffChangeTopVer +
                ", head=" + head.get().topologyVersion() +
                ", history=" + affCache.keySet() +
                ", maxNonShallowHistorySize=" + maxNonShallowHistSize +
                ']');
        }
        // The nearest history entry is newer than the requested version: history was cleaned.
        if (cache.topologyVersion().compareTo(topVer) > 0) {
            throw new IllegalStateException("Getting affinity for too old topology version that is already " +
                "out of history (try to increase '" + IGNITE_AFFINITY_HISTORY_SIZE + "' system property)" +
                " [locNode=" + ctx.discovery().localNode() +
                ", grp=" + cacheOrGrpName +
                ", topVer=" + topVer +
                ", lastAffChangeTopVer=" + lastAffChangeTopVer +
                ", head=" + head.get().topologyVersion() +
                ", history=" + affCache.keySet() +
                ", maxNonShallowHistorySize=" + maxNonShallowHistSize +
                ']');
        }
    }
    assert cache.topologyVersion().compareTo(lastAffChangeTopVer) >= 0 && cache.topologyVersion().compareTo(topVer) <= 0
        : "Invalid cached affinity: [cache=" + cache + ", topVer=" + topVer + ", lastAffChangedTopVer=" + lastAffChangeTopVer + "]";
    return cache;
}
/**
 * @param part Partition.
 * @param startVer Start version.
 * @param endVer End version.
 * @return {@code True} if primary changed or required affinity version not found in history.
 */
public boolean primaryChanged(int part, AffinityTopologyVersion startVer, AffinityTopologyVersion endVer) {
    AffinityAssignment aff = affCache.get(startVer);
    // Start version already evicted from history: report "not changed".
    // NOTE(review): this contradicts the javadoc ("not found in history" -> true), which only
    // holds for a missing endVer below -- confirm which behavior callers rely on.
    if (aff == null)
        return false;
    List<ClusterNode> nodes = aff.get(part);
    // No owners at the start version: treat as changed.
    if (nodes.isEmpty())
        return true;
    ClusterNode primary = nodes.get(0);
    // Walk the history strictly after startVer; the primary is the node at index 0.
    for (AffinityAssignment assignment : affCache.tailMap(startVer, false).values()) {
        List<ClusterNode> nodes0 = assignment.assignment().get(part);
        if (nodes0.isEmpty())
            return true;
        if (!nodes0.get(0).equals(primary))
            return true;
        // Reached endVer with the same primary at every step in between: not changed.
        if (assignment.topologyVersion().equals(endVer))
            return false;
    }
    // endVer is not present in history: report "changed" as the conservative answer.
    return true;
}
/**
 * Initializes this cache from another, already initialized, affinity cache.
 *
 * @param aff Affinity cache to copy the state from.
 */
public void init(GridAffinityAssignmentCache aff) {
    assert aff.lastVersion().compareTo(lastVersion()) >= 0;
    assert aff.idealAssignmentRaw() != null;

    AffinityTopologyVersion ver = aff.lastVersion();

    // Copy the ideal assignment first, then initialize the ready assignment for the same version.
    idealAssignment(ver, aff.idealAssignmentRaw());

    AffinityAssignment readyAssign = aff.cachedAffinity(ver);

    initialize(ver, readyAssign.assignment());
}
/**
 * Blocks the current thread until affinity for the given topology version is initialized.
 * While waiting, the thread name is decorated with the awaited version to ease
 * thread-dump analysis; the original name is always restored.
 *
 * @param topVer Topology version to wait.
 */
private void awaitTopologyVersion(AffinityTopologyVersion topVer) {
    if (head.get().topologyVersion().compareTo(topVer) >= 0)
        return;

    try {
        if (log.isDebugEnabled())
            log.debug("Will wait for topology version [locNodeId=" + ctx.localNodeId() +
                ", topVer=" + topVer + ']');

        IgniteInternalFuture<AffinityTopologyVersion> fut = readyFuture(topVer);

        // A null future means the version became ready concurrently.
        if (fut == null)
            return;

        Thread cur = Thread.currentThread();
        String oldName = cur.getName();

        try {
            cur.setName(oldName + " (waiting " + topVer + ")");

            fut.get();
        }
        finally {
            cur.setName(oldName);
        }
    }
    catch (IgniteCheckedException e) {
        throw new IgniteException("Failed to wait for affinity ready future for topology version: " + topVer,
            e);
    }
}
/**
 * Cleaning the affinity history.
 *
 * @param replaced Replaced entry in case history item was already present, null otherwise.
 * @param added New history item.
 */
private synchronized void onHistoryAdded(
    HistoryAffinityAssignment replaced,
    HistoryAffinityAssignment added
) {
    // Maintain the counter of full-size (non-shallow) history entries.
    if (replaced == null) {
        if (added.isFullSizeInstance())
            nonShallowHistSize++;
    }
    else if (replaced.isFullSizeInstance() != added.isFullSizeInstance())
        nonShallowHistSize += added.isFullSizeInstance() ? 1 : -1;
    int totalSize = affCache.size();
    // Nothing to do while both size limits are satisfied.
    if (!shouldContinueCleanup(nonShallowHistSize, totalSize))
        return;
    AffinityTopologyVersion lastAffChangeTopVer =
        ctx.cache().context().exchange().lastAffinityChangedTopologyVersion(head.get().topologyVersion());
    HistoryAffinityAssignment aff0 = null;
    Iterator<HistoryAffinityAssignment> it = affCache.values().iterator();
    // Evict oldest entries (iteration order of the sorted history map) until limits hold.
    while (it.hasNext() && shouldContinueCleanup(nonShallowHistSize, totalSize)) {
        aff0 = it.next();
        if (aff0.topologyVersion().equals(lastAffChangeTopVer))
            continue; // Keep lastAffinityChangedTopologyVersion, it's required for some operations.
        if (aff0.isFullSizeInstance()) {
            // Always retain a minimum number of full-size entries regardless of the limits.
            if (nonShallowHistSize <= MIN_NON_SHALLOW_HIST_SIZE)
                continue;
            nonShallowHistSize--;
        }
        totalSize--;
        it.remove();
    }
    assert aff0 != null;
    // NOTE(review): aff0 is the last *visited* entry, which may have been skipped rather than
    // removed -- confirm removeCachedAffinity is intended to be driven by that version.
    ctx.affinity().removeCachedAffinity(aff0.topologyVersion());
    assert it.hasNext() : "All elements have been removed from affinity cache during cleanup";
}
/**
 * Checks whether affinity cache size conditions are still unsatisfied.
 *
 * @param nonShallowSize Non shallow size.
 * @param totalSize Total size.
 * @return <code>true</code> if affinity cache cleanup is not finished yet.
 */
private boolean shouldContinueCleanup(int nonShallowSize, int totalSize) {
    if (nonShallowSize > maxNonShallowHistSize)
        return true;

    return totalSize > maxTotalHistSize;
}

/**
 * @return All initialized versions.
 */
public NavigableSet<AffinityTopologyVersion> cachedVersions() {
    return affCache.keySet();
}
/**
 * Builds a human-readable representation of the given assignment for trace logging:
 * one "Part [id=..., owners=[...]]" fragment per partition.
 *
 * @param affAssignment Affinity assignment.
 * @return String representation of given {@code affAssignment}.
 */
private static String fold(List<List<ClusterNode>> affAssignment) {
    SB buf = new SB();

    for (int part = 0; part < affAssignment.size(); part++) {
        // Render the owners of this partition first (consistent IDs, space-separated).
        SB owners = new SB();

        for (ClusterNode owner : affAssignment.get(part)) {
            owners.a(owner.consistentId());
            owners.a(' ');
        }

        buf.a("Part [");
        buf.a("id=" + part + ", ");
        buf.a("owners=[");
        buf.a(owners);
        buf.a(']');
        buf.a("] ");
    }

    return buf.toString();
}
/**
 * Affinity ready future. Will remove itself from ready futures map.
 */
private class AffinityReadyFuture extends GridFutureAdapter<AffinityTopologyVersion> {
    /** Topology version this future waits for; used as the removal key on completion. */
    private AffinityTopologyVersion reqTopVer;
    /**
     *
     * @param reqTopVer Required topology version.
     */
    private AffinityReadyFuture(AffinityTopologyVersion reqTopVer) {
        this.reqTopVer = reqTopVer;
    }
    /** {@inheritDoc} */
    @Override public boolean onDone(AffinityTopologyVersion res, @Nullable Throwable err) {
        assert res != null || err != null;
        boolean done = super.onDone(res, err);
        // Self-cleanup on first completion only. remove(key, value) avoids evicting a newer
        // future that may have been registered for the same version in the meantime.
        if (done)
            readyFuts.remove(reqTopVer, this);
        return done;
    }
    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(AffinityReadyFuture.class, this);
    }
}
}
|
hibernate/hibernate-orm | 36,146 | hibernate-spatial/src/test/java/org/hibernate/spatial/dialect/hana/TestHANASpatialFunctions.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.spatial.dialect.hana;
import java.lang.invoke.MethodHandles;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import jakarta.persistence.Query;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;
import org.hibernate.spatial.HSMessageLogger;
import org.hibernate.spatial.testing.SpatialFunctionalTestCase;
import org.hibernate.spatial.testing.dialects.hana.HANAExpectationsFactory;
import org.hibernate.testing.RequiresDialect;
import org.junit.Ignore;
import org.junit.Test;
import org.jboss.logging.Logger;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.io.WKBWriter;
import org.locationtech.jts.io.WKTWriter;
import static java.lang.String.format;
//TODO - see what tests are still needed, when we update/fix the HANA spatial support
@RequiresDialect(value = HANASpatialDialect.class, comment = "This test tests the HANA spatial functions not covered by Hibernate Spatial", jiraKey = "HHH-12426")
@Ignore
@Deprecated
public class TestHANASpatialFunctions extends SpatialFunctionalTestCase {
private static final HSMessageLogger LOG = Logger.getMessageLogger(
MethodHandles.lookup(),
HSMessageLogger.class,
TestHANASpatialFunctions.class.getName()
);
protected HANAExpectationsFactory hanaExpectationsFactory;
/** {@inheritDoc} Additionally captures the HANA-specific expectations factory. */
@Override
protected void afterConfigurationBuilt(Configuration cfg) {
    super.afterConfigurationBuilt( cfg );
    // The base class creates the factory; for this dialect-specific test it is always the
    // HANA implementation, so the downcast is safe.
    this.hanaExpectationsFactory = (HANAExpectationsFactory) this.expectationsFactory;
}
/** @return Logger used by the base test infrastructure. */
@Override
protected HSMessageLogger getLogger() {
    return LOG;
}
/** Runs the {@code alphashape} check against the JTS geometry model. */
@Test
public void test_alphashape_on_jts() throws SQLException {
    alphashape( JTS );
}

/** Runs the {@code alphashape} check against the Geolatte geometry model. */
@Test
public void test_alphashape_on_geolatte() throws SQLException {
    alphashape( GEOLATTE );
}

/**
 * Verifies the HANA {@code ALPHASHAPE} function: compares values computed directly by the
 * database with the values returned through HQL.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void alphashape(String pckg) throws SQLException {
    Map<Integer, Geometry> expected = hanaExpectationsFactory.getAlphaShape( 1 );
    String query = format(
        Locale.ENGLISH,
        "SELECT id, alphashape(geom, 1) FROM %s where geometrytype(geom) in ('ST_Point', 'ST_MultiPoint')",
        entityName( pckg )
    );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}

/** Runs the {@code area} check against the JTS geometry model. */
@Test
public void test_area_on_jts() throws SQLException {
    area( JTS );
}

/** Runs the {@code area} check against the Geolatte geometry model. */
@Test
public void test_area_on_geolatte() throws SQLException {
    area( GEOLATTE );
}

/**
 * Verifies the HANA {@code area} function via HQL against database-computed expectations.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void area(String pckg) throws SQLException {
    Map<Integer, Double> expected = hanaExpectationsFactory.getArea();
    String query = format(
        "SELECT id, area(geom) FROM %s where geometrytype(geom) in ('ST_Polygon', 'ST_MultiPolygon')",
        entityName( pckg )
    );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}
/** Runs the {@code asewkb} check against the JTS geometry model. */
@Test
public void test_asewkb_on_jts() throws SQLException {
    asewkb( JTS );
}

/** Runs the {@code asewkb} check against the Geolatte geometry model. */
@Test
public void test_asewkb_on_geolatte() throws SQLException {
    asewkb( GEOLATTE );
}

/**
 * Verifies the HANA {@code asewkb} function via HQL.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void asewkb(String pckg) throws SQLException {
    Map<Integer, byte[]> expected = hanaExpectationsFactory.getAsEWKB();
    String query = format( "SELECT id, asewkb(geom) FROM %s", entityName( pckg ) );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}

/** Runs the {@code asewkt} check against the JTS geometry model. */
@Test
public void test_asewkt_on_jts() throws SQLException {
    asewkt( JTS );
}

/** Runs the {@code asewkt} check against the Geolatte geometry model. */
@Test
public void test_asewkt_on_geolatte() throws SQLException {
    asewkt( GEOLATTE );
}

/**
 * Verifies the HANA {@code asewkt} function via HQL.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void asewkt(String pckg) throws SQLException {
    Map<Integer, String> expected = hanaExpectationsFactory.getAsEWKT();
    String query = format( "SELECT id, asewkt(geom) FROM %s", entityName( pckg ) );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}

/** Runs the {@code asgeojson} check against the JTS geometry model. */
@Test
public void test_asgeojson_on_jts() throws SQLException {
    asgeojson( JTS );
}

/** Runs the {@code asgeojson} check against the Geolatte geometry model. */
@Test
public void test_asgeojson_on_geolatte() throws SQLException {
    asgeojson( GEOLATTE );
}

/**
 * Verifies the HANA {@code asgeojson} function via HQL.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void asgeojson(String pckg) throws SQLException {
    Map<Integer, String> expected = hanaExpectationsFactory.getAsGeoJSON();
    String query = format( "SELECT id, asgeojson(geom) FROM %s", entityName( pckg ) );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}

/** Runs the {@code assvg} check against the JTS geometry model. */
@Test
public void test_assvg_on_jts() throws SQLException {
    assvg( JTS );
}

/** Runs the {@code assvg} check against the Geolatte geometry model. */
@Test
public void test_assvg_on_geolatte() throws SQLException {
    assvg( GEOLATTE );
}

/**
 * Verifies the HANA {@code assvg} function via HQL.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void assvg(String pckg) throws SQLException {
    Map<Integer, String> expected = hanaExpectationsFactory.getAsSVG();
    String query = format( "SELECT id, assvg(geom) FROM %s", entityName( pckg ) );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}

/** Runs the {@code assvgaggr} check against the JTS geometry model. */
@Test
public void test_assvgaggr_on_jts() throws SQLException {
    assvgaggr( JTS );
}

/** Runs the {@code assvgaggr} check against the Geolatte geometry model. */
@Test
public void test_assvgaggr_on_geolatte() throws SQLException {
    assvgaggr( GEOLATTE );
}

/**
 * Verifies the HANA {@code assvgaggr} aggregate function via HQL.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void assvgaggr(String pckg) throws SQLException {
    Map<Integer, String> expected = hanaExpectationsFactory.getAsSVGAggr();
    String query = format(
        "SELECT cast(count(g) as int), assvgaggr(geom) FROM %s g",
        entityName( pckg )
    );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}
/** Runs the {@code aswkb} check against the JTS geometry model. */
@Test
public void test_aswkb_on_jts() throws SQLException {
    aswkb( JTS );
}

/** Runs the {@code aswkb} check against the Geolatte geometry model. */
@Test
public void test_aswkb_on_geolatte() throws SQLException {
    aswkb( GEOLATTE );
}

/**
 * Verifies the HANA {@code aswkb} function via HQL.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void aswkb(String pckg) throws SQLException {
    Map<Integer, byte[]> expected = hanaExpectationsFactory.getAsWKB();
    String query = format( "SELECT id, aswkb(geom) FROM %s", entityName( pckg ) );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}

/** Runs the {@code aswkt} check against the JTS geometry model. */
@Test
public void test_aswkt_on_jts() throws SQLException {
    aswkt( JTS );
}

/** Runs the {@code aswkt} check against the Geolatte geometry model. */
@Test
public void test_aswkt_on_geolatte() throws SQLException {
    aswkt( GEOLATTE );
}

/**
 * Verifies the HANA {@code aswkt} function via HQL.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void aswkt(String pckg) throws SQLException {
    Map<Integer, String> expected = hanaExpectationsFactory.getAsWKT();
    String query = format( "SELECT id, aswkt(geom) FROM %s", entityName( pckg ) );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}
/** Runs the {@code convexhullaggr} check against the JTS geometry model. */
@Test
public void test_convexhullaggr_on_jts() throws SQLException {
    convexhullaggr( JTS );
}

/** Runs the {@code convexhullaggr} check against the Geolatte geometry model. */
@Test
public void test_convexhullaggr_on_geolatte() throws SQLException {
    convexhullaggr( GEOLATTE );
}

/**
 * Verifies the HANA {@code convexhullaggr} aggregate function via HQL.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void convexhullaggr(String pckg) throws SQLException {
    Map<Integer, Geometry> expected = hanaExpectationsFactory.getConvexHullAggr();
    String query = format(
        "SELECT cast(count(g) as int), convexhullaggr(geom) FROM %s g",
        entityName( pckg )
    );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}

/** Runs the {@code centroid} check against the JTS geometry model. */
@Test
public void test_centroid_on_jts() throws SQLException {
    centroid( JTS );
}

/** Runs the {@code centroid} check against the Geolatte geometry model. */
@Test
public void test_centroid_on_geolatte() throws SQLException {
    centroid( GEOLATTE );
}

/**
 * Verifies the HANA {@code centroid} function via HQL (polygons only).
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void centroid(String pckg) throws SQLException {
    Map<Integer, Geometry> expected = hanaExpectationsFactory.getCentroid();
    String query = format(
        "SELECT id, centroid(geom) FROM %s g where geometrytype(geom) = 'ST_Polygon'",
        entityName( pckg )
    );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}

/** Runs the {@code coorddim} check against the JTS geometry model. */
@Test
public void test_coorddim_on_jts() throws SQLException {
    coorddim( JTS );
}

/** Runs the {@code coorddim} check against the Geolatte geometry model. */
@Test
public void test_coorddim_on_geolatte() throws SQLException {
    coorddim( GEOLATTE );
}

/**
 * Verifies the HANA {@code coorddim} function via HQL.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void coorddim(String pckg) throws SQLException {
    Map<Integer, Integer> expected = hanaExpectationsFactory.getCoordDim();
    String query = format( "SELECT id, coorddim(geom) FROM %s", entityName( pckg ) );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}
/** Runs the {@code coveredby} check against the JTS geometry model. */
@Test
public void test_coveredby_on_jts() throws SQLException {
    coveredby( JTS );
}

/** Runs the {@code coveredby} check against the Geolatte geometry model. */
@Test
public void test_coveredby_on_geolatte() throws SQLException {
    coveredby( GEOLATTE );
}

/**
 * Verifies the HANA {@code coveredby} predicate via HQL, using the shared test polygon
 * as the filter geometry.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void coveredby(String pckg) throws SQLException {
    Map<Integer, Boolean> expected = hanaExpectationsFactory.getCoveredBy( expectationsFactory.getTestPolygon() );
    String query = format(
        "SELECT id, coveredby(geom, :filter) FROM %s where coveredby(geom, :filter) = true and srid(geom) = %d",
        entityName( pckg ),
        expectationsFactory.getTestSrid()
    );
    Map<String, Object> bindings = createQueryParams( "filter", expectationsFactory.getTestPolygon() );
    retrieveHQLResultsAndCompare( expected, query, bindings, pckg );
}

/** Runs the {@code covers} check against the JTS geometry model. */
@Test
public void test_covers_on_jts() throws SQLException {
    covers( JTS );
}

/** Runs the {@code covers} check against the Geolatte geometry model. */
@Test
public void test_covers_on_geolatte() throws SQLException {
    covers( GEOLATTE );
}

/**
 * Verifies the HANA {@code covers} predicate via HQL, using the shared test polygon
 * as the filter geometry.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void covers(String pckg) throws SQLException {
    Map<Integer, Boolean> expected = hanaExpectationsFactory.getCovers( expectationsFactory.getTestPolygon() );
    String query = format(
        "SELECT id, covers(geom, :filter) FROM %s where covers(geom, :filter) = true and srid(geom) = %d",
        entityName( pckg ),
        expectationsFactory.getTestSrid()
    );
    Map<String, Object> bindings = createQueryParams( "filter", expectationsFactory.getTestPolygon() );
    retrieveHQLResultsAndCompare( expected, query, bindings, pckg );
}
/** Runs the {@code endpoint} check against the JTS geometry model. */
@Test
public void test_endpoint_on_jts() throws SQLException {
    endpoint( JTS );
}

/** Runs the {@code endpoint} check against the Geolatte geometry model. */
@Test
public void test_endpoint_on_geolatte() throws SQLException {
    endpoint( GEOLATTE );
}

/**
 * Verifies the HANA {@code endpoint} function via HQL (line strings only).
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void endpoint(String pckg) throws SQLException {
    Map<Integer, Geometry> expected = hanaExpectationsFactory.getEndPoint();
    String query = format(
        "SELECT id, endpoint(geom) FROM %s g where geometrytype(geom) = 'ST_LineString'",
        entityName( pckg )
    );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}

/** Runs the {@code envelopeaggr} check against the JTS geometry model. */
@Test
public void test_envelopeaggr_on_jts() throws SQLException {
    envelopeaggr( JTS );
}

/** Runs the {@code envelopeaggr} check against the Geolatte geometry model. */
@Test
public void test_envelopeaggr_on_geolatte() throws SQLException {
    envelopeaggr( GEOLATTE );
}

/**
 * Verifies the HANA {@code envelopeaggr} aggregate function via HQL.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void envelopeaggr(String pckg) throws SQLException {
    Map<Integer, Geometry> expected = hanaExpectationsFactory.getEnvelopeAggr();
    String query = format(
        "SELECT cast(count(g) as int), envelopeaggr(geom) FROM %s g",
        entityName( pckg )
    );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}

/** Runs the {@code exteriorring} check against the JTS geometry model. */
@Test
public void test_exteriorring_on_jts() throws SQLException {
    exteriorring( JTS );
}

/** Runs the {@code exteriorring} check against the Geolatte geometry model. */
@Test
public void test_exteriorring_on_geolatte() throws SQLException {
    exteriorring( GEOLATTE );
}

/**
 * Verifies the HANA {@code exteriorring} function via HQL (polygons only).
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void exteriorring(String pckg) throws SQLException {
    Map<Integer, Geometry> expected = hanaExpectationsFactory.getExteriorRing();
    String query = format(
        "SELECT id, exteriorring(geom) FROM %s g where geometrytype(geom) = 'ST_Polygon'",
        entityName( pckg )
    );
    retrieveHQLResultsAndCompare( expected, query, pckg );
}
/** Runs the {@code geomfromewkb} check against the JTS geometry model. */
@Test
public void test_geomfromewkb_on_jts() throws SQLException {
    geomfromewkb( JTS );
}

/** Runs the {@code geomfromewkb} check against the Geolatte geometry model. */
@Test
public void test_geomfromewkb_on_geolatte() throws SQLException {
    geomfromewkb( GEOLATTE );
}

/**
 * Verifies the HANA {@code geomfromewkb} constructor function: feeds the test polygon
 * as extended WKB and compares the database result with the HQL result.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void geomfromewkb(String pckg) throws SQLException {
    // 2-dimensional writer with SRID included (extended WKB).
    WKBWriter wkbWriter = new WKBWriter( 2, true );
    byte[] ewkb = wkbWriter.write( expectationsFactory.getTestPolygon() );
    Map<Integer, Geometry> expected = hanaExpectationsFactory.getGeomFromEWKB( ewkb );
    String query = format(
        "SELECT 1, cast(geomfromewkb(:param) as %s) FROM %s g",
        getGeometryTypeFromPackage( pckg ),
        entityName( pckg )
    );
    Map<String, Object> bindings = createQueryParams( "param", ewkb );
    retrieveHQLResultsAndCompare( expected, query, bindings, pckg );
}

/** Runs the {@code geomfromewkt} check against the JTS geometry model. */
@Test
public void test_geomfromewkt_on_jts() throws SQLException {
    geomfromewkt( JTS );
}

/** Runs the {@code geomfromewkt} check against the Geolatte geometry model. */
@Test
public void test_geomfromewkt_on_geolatte() throws SQLException {
    geomfromewkt( GEOLATTE );
}

/**
 * Verifies the HANA {@code geomfromewkt} constructor function using an EWKT string
 * built from the test polygon and its SRID.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void geomfromewkt(String pckg) throws SQLException {
    WKTWriter wktWriter = new WKTWriter();
    // EWKT = "SRID=<srid>;<wkt>".
    String ewkt = "SRID=" + expectationsFactory.getTestSrid() + ";" + wktWriter.write( expectationsFactory.getTestPolygon() );
    Map<Integer, Geometry> expected = hanaExpectationsFactory.getGeomFromEWKT( ewkt );
    String query = format(
        "SELECT 1, cast(geomfromewkt(:param) as %s) FROM %s g",
        getGeometryTypeFromPackage( pckg ),
        entityName( pckg )
    );
    Map<String, Object> bindings = createQueryParams( "param", ewkt );
    retrieveHQLResultsAndCompare( expected, query, bindings, pckg );
}

/** Runs the {@code geomfromtext} check against the JTS geometry model. */
@Test
public void test_geomfromtext_on_jts() throws SQLException {
    geomfromtext( JTS );
}

/** Runs the {@code geomfromtext} check against the Geolatte geometry model. */
@Test
public void test_geomfromtext_on_geolatte() throws SQLException {
    geomfromtext( GEOLATTE );
}

/**
 * Verifies the HANA {@code geomfromtext} constructor function using the WKT of the
 * test polygon.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void geomfromtext(String pckg) throws SQLException {
    String text = expectationsFactory.getTestPolygon().toText();
    Map<Integer, Geometry> expected = hanaExpectationsFactory.getGeomFromText( text );
    String query = format(
        "SELECT 1, cast(geomfromtext(:param) as %s) FROM %s g",
        getGeometryTypeFromPackage( pckg ),
        entityName( pckg )
    );
    Map<String, Object> bindings = createQueryParams( "param", text );
    retrieveHQLResultsAndCompare( expected, query, bindings, pckg );
}

/** Runs the {@code geomfromwkb} check against the JTS geometry model. */
@Test
public void test_geomfromwkb_on_jts() throws SQLException {
    geomfromwkb( JTS );
}

/** Runs the {@code geomfromwkb} check against the Geolatte geometry model. */
@Test
public void test_geomfromwkb_on_geolatte() throws SQLException {
    geomfromwkb( GEOLATTE );
}

/**
 * Verifies the HANA {@code geomfromwkb} constructor function using plain (no SRID) WKB
 * of the test polygon.
 *
 * @param pckg Geometry package under test.
 * @throws SQLException When building the expected values fails.
 */
public void geomfromwkb(String pckg) throws SQLException {
    // 2-dimensional writer without SRID (plain WKB).
    WKBWriter wkbWriter = new WKBWriter( 2, false );
    byte[] wkb = wkbWriter.write( expectationsFactory.getTestPolygon() );
    Map<Integer, Geometry> expected = hanaExpectationsFactory.getGeomFromWKB( wkb );
    String query = format(
        "SELECT 1, cast(geomfromwkb(:param) as %s) FROM %s g",
        getGeometryTypeFromPackage( pckg ),
        entityName( pckg )
    );
    Map<String, Object> bindings = createQueryParams( "param", wkb );
    retrieveHQLResultsAndCompare( expected, query, bindings, pckg );
}
@Test
public void test_geomfromwkt_on_jts() throws SQLException {
geomfromwkt( JTS );
}
@Test
public void test_geomfromwkt_on_geolatte() throws SQLException {
geomfromwkt( GEOLATTE );
}
public void geomfromwkt(String pckg) throws SQLException {
WKTWriter writer = new WKTWriter();
String wkt = writer.write( expectationsFactory.getTestPolygon() );
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeomFromWKT( wkt );
String hql = format(
"SELECT 1, cast(geomfromwkt(:param) as %s) FROM %s g",
getGeometryTypeFromPackage( pckg ),
entityName( pckg )
);
Map<String, Object> params = createQueryParams( "param", wkt );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
}
@Test
public void test_geometryn_on_jts() throws SQLException {
geometryn( JTS );
}
@Test
public void test_geometryn_on_geolatte() throws SQLException {
geometryn( GEOLATTE );
}
public void geometryn(String pckg) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getGeometryN( 1 );
String hql = format(
"SELECT id, cast(geometryn(geom, :n) as %s) FROM %s g where geometrytype(geom) = 'ST_GeometryCollection'",
getGeometryTypeFromPackage( pckg ),
entityName( pckg )
);
Map<String, Object> params = createQueryParams( "n", 1 );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
}
@Test
public void test_interiorringn_on_jts() throws SQLException {
interiorringn( JTS );
}
@Test
public void test_interiorringn_on_geolatte() throws SQLException {
interiorringn( GEOLATTE );
}
public void interiorringn(String pckg) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getInteriorRingN( 1 );
String hql = format(
"SELECT id, cast(interiorringn(geom, :n) as %s) FROM %s g where geometrytype(geom) = 'ST_Polygon'",
getGeometryTypeFromPackage( pckg ),
entityName( pckg )
);
Map<String, Object> params = createQueryParams( "n", 1 );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
}
@Test
public void test_intersectionaggr_on_jts() throws SQLException {
intersectionaggr( JTS );
}
@Test
public void test_intersectionaggr_on_geolatte() throws SQLException {
intersectionaggr( GEOLATTE );
}
public void intersectionaggr(String pckg) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getIntersectionAggr();
String hql = format(
"SELECT cast(count(g) as int), intersectionaggr(geom) FROM %s g",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_intersectsrect_on_jts() throws SQLException {
intersectsrect( JTS );
}
@Test
public void test_intersectsrect_on_geolatte() throws SQLException {
intersectsrect( GEOLATTE );
}
public void intersectsrect(String pckg) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIntersectsRect(
(Point) expectationsFactory.getTestPoint().reverse(),
expectationsFactory.getTestPoint()
);
String hql = format(
"SELECT id, intersectsrect(geom, :pmin, :pmax) FROM %s where intersectsrect(geom, :pmin, :pmax) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
Map<String, Object> params = createQueryParams( "pmin", expectationsFactory.getTestPoint().reverse() );
params.put( "pmax", expectationsFactory.getTestPoint() );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
}
@Test
public void test_is3d_on_jts() throws SQLException {
is3d( JTS );
}
@Test
public void test_is3d_on_geolatte() throws SQLException {
is3d( GEOLATTE );
}
public void is3d(String pckg) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIs3D();
String hql = format(
"SELECT id, is3d(geom) FROM %s where is3d(geom) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_isclosed_on_jts() throws SQLException {
isclosed( JTS );
}
@Test
public void test_isclosed_on_geolatte() throws SQLException {
isclosed( GEOLATTE );
}
public void isclosed(String pckg) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIsClosed();
String hql = format(
"SELECT id, isclosed(geom) FROM %s where geometrytype(geom) in ('ST_LineString', 'ST_MultiLineString') and isclosed(geom) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_ismeasured_on_jts() throws SQLException {
ismeasured( JTS );
}
@Test
public void test_ismeasured_on_geolatte() throws SQLException {
ismeasured( GEOLATTE );
}
public void ismeasured(String pckg) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIsMeasured();
String hql = format(
"SELECT id, ismeasured(geom) FROM %s where ismeasured(geom) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_isring_on_jts() throws SQLException {
isring( JTS );
}
@Test
public void test_isring_on_geolatte() throws SQLException {
isring( GEOLATTE );
}
public void isring(String pckg) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIsRing();
String hql = format(
"SELECT id, isring(geom) FROM %s where geometrytype(geom) in ('ST_LineString') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_isvalid_on_jts() throws SQLException {
isvalid( JTS );
}
@Test
public void test_isvalid_on_geolatte() throws SQLException {
isvalid( GEOLATTE );
}
public void isvalid(String pckg) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getIsValid();
String hql = format(
"SELECT id, isvalid(geom) FROM %s where isvalid(geom) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_length_on_jts() throws SQLException {
length( JTS );
}
@Test
public void test_length_on_geolatte() throws SQLException {
length( GEOLATTE );
}
public void length(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getLength();
String hql = format(
"SELECT id, length(geom) FROM %s where geometrytype(geom) in ('ST_LineString', 'ST_MultiLineString') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_m_on_jts() throws SQLException {
m( JTS );
}
@Test
public void test_m_on_geolatte() throws SQLException {
m( GEOLATTE );
}
public void m(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getM();
String hql = format(
"SELECT id, m(geom) FROM %s where geometrytype(geom) in ('ST_Point') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_mmax_on_jts() throws SQLException {
mmax( JTS );
}
@Test
public void test_mmax_on_geolatte() throws SQLException {
mmax( GEOLATTE );
}
public void mmax(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getMMax();
String hql = format(
"SELECT id, mmax(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_mmin_on_jts() throws SQLException {
mmin( JTS );
}
@Test
public void test_mmin_on_geolatte() throws SQLException {
mmin( GEOLATTE );
}
public void mmin(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getMMin();
String hql = format(
"SELECT id, mmin(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_numgeometries_on_jts() throws SQLException {
numgeometries( JTS );
}
@Test
public void test_numgeometries_on_geolatte() throws SQLException {
numgeometries( GEOLATTE );
}
public void numgeometries(String pckg) throws SQLException {
Map<Integer, Integer> dbexpected = hanaExpectationsFactory.getNumGeometries();
String hql = format(
"SELECT id, numgeometries(geom) FROM %s where geometrytype(geom) in ('ST_GeometryCollection') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_numinteriorring_on_jts() throws SQLException {
numinteriorring( JTS );
}
@Test
public void test_numnuminteriorring_on_geolatte() throws SQLException {
numinteriorring( GEOLATTE );
}
public void numinteriorring(String pckg) throws SQLException {
Map<Integer, Integer> dbexpected = hanaExpectationsFactory.getNumInteriorRing();
String hql = format(
"SELECT id, numinteriorring(geom) FROM %s where geometrytype(geom) in ('ST_Polygon') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_numinteriorrings_on_jts() throws SQLException {
numinteriorrings( JTS );
}
@Test
public void test_numnuminteriorrings_on_geolatte() throws SQLException {
numinteriorrings( GEOLATTE );
}
public void numinteriorrings(String pckg) throws SQLException {
Map<Integer, Integer> dbexpected = hanaExpectationsFactory.getNumInteriorRings();
String hql = format(
"SELECT id, numinteriorrings(geom) FROM %s where geometrytype(geom) in ('ST_Polygon') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_numpoints_on_jts() throws SQLException {
numpoints( JTS );
}
@Test
public void test_numpoints_on_geolatte() throws SQLException {
numpoints( GEOLATTE );
}
public void numpoints(String pckg) throws SQLException {
Map<Integer, Integer> dbexpected = hanaExpectationsFactory.getNumPoints();
String hql = format(
"SELECT id, numpoints(geom) FROM %s where geometrytype(geom) in ('ST_LineString') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_orderingequals_on_jts() throws SQLException {
orderingequals( JTS );
}
@Test
public void test_orderingequals_on_geolatte() throws SQLException {
orderingequals( GEOLATTE );
}
public void orderingequals(String pckg) throws SQLException {
Map<Integer, Boolean> dbexpected = hanaExpectationsFactory.getOrderingEquals( expectationsFactory.getTestPolygon() );
String hql = format(
"SELECT id, orderingequals(geom, :filter) FROM %s where orderingequals(geom, :filter) = true and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
Map<String, Object> params = createQueryParams( "filter", expectationsFactory.getTestPolygon() );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
}
@Test
public void test_perimeter_on_jts() throws SQLException {
perimeter( JTS );
}
@Test
public void test_perimeter_on_geolatte() throws SQLException {
perimeter( GEOLATTE );
}
public void perimeter(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getPerimeter();
String hql = format(
"SELECT id, perimeter(geom) FROM %s where geometrytype(geom) in ('ST_Polygon', 'ST_MultiPolygon') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_pointonsurface_on_jts() throws SQLException {
pointonsurface( JTS );
}
@Test
public void test_pointonsurface_on_geolatte() throws SQLException {
pointonsurface( GEOLATTE );
}
public void pointonsurface(String pckg) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getPointOnSurface();
String hql = format(
"SELECT id, pointonsurface(geom) FROM %s where geometrytype(geom) in ('ST_Polygon', 'ST_MultiPolygon') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_pointn_on_jts() throws SQLException {
pointn( JTS );
}
@Test
public void test_pointn_on_geolatte() throws SQLException {
pointn( GEOLATTE );
}
public void pointn(String pckg) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getPointN( 1 );
String hql = format(
"SELECT id, pointn(geom, :n) FROM %s where geometrytype(geom) in ('ST_LineString') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
Map<String, Object> params = createQueryParams( "n", 1 );
retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
}
// ST_GEOMETRY columns are not supported
@Test(expected = SQLException.class)
public void test_snaptogrid_on_jts() throws SQLException {
snaptogrid( JTS );
}
// ST_GEOMETRY columns are not supported
@Test(expected = SQLException.class)
public void test_snaptogrid_on_geolatte() throws SQLException {
snaptogrid( GEOLATTE );
}
public void snaptogrid(String pckg) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getSnapToGrid();
String hql = format(
"SELECT id, snaptogrid(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_startpoint_on_jts() throws SQLException {
startpoint( JTS );
}
@Test
public void test_startpoint_on_geolatte() throws SQLException {
startpoint( GEOLATTE );
}
public void startpoint(String pckg) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getStartPoint();
String hql = format(
"SELECT id, startpoint(geom) FROM %s g where geometrytype(geom) = 'ST_LineString'",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_unionaggr_on_jts() throws SQLException {
unionaggr( JTS );
}
@Test
public void test_unionaggr_on_geolatte() throws SQLException {
unionaggr( GEOLATTE );
}
public void unionaggr(String pckg) throws SQLException {
Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getUnionAggr();
String hql = format(
"SELECT cast(count(g) as int), unionaggr(geom) FROM %s g",
entityName( pckg )
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_x_on_jts() throws SQLException {
x( JTS );
}
@Test
public void test_x_on_geolatte() throws SQLException {
x( GEOLATTE );
}
public void x(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getX();
String hql = format(
"SELECT id, x(geom) FROM %s where geometrytype(geom) in ('ST_Point') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_xmax_on_jts() throws SQLException {
xmax( JTS );
}
@Test
public void test_xmax_on_geolatte() throws SQLException {
xmax( GEOLATTE );
}
public void xmax(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getXMax();
String hql = format(
"SELECT id, xmax(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_xmin_on_jts() throws SQLException {
xmin( JTS );
}
@Test
public void test_xmin_on_geolatte() throws SQLException {
xmin( GEOLATTE );
}
public void xmin(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getXMin();
String hql = format(
"SELECT id, xmin(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_y_on_jts() throws SQLException {
y( JTS );
}
@Test
public void test_y_on_geolatte() throws SQLException {
y( GEOLATTE );
}
public void y(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getY();
String hql = format(
"SELECT id, y(geom) FROM %s where geometrytype(geom) in ('ST_Point') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_ymax_on_jts() throws SQLException {
ymax( JTS );
}
@Test
public void test_ymax_on_geolatte() throws SQLException {
ymax( GEOLATTE );
}
public void ymax(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getYMax();
String hql = format(
"SELECT id, ymax(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_ymin_on_jts() throws SQLException {
ymin( JTS );
}
@Test
public void test_ymin_on_geolatte() throws SQLException {
ymin( GEOLATTE );
}
public void ymin(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getYMin();
String hql = format(
"SELECT id, ymin(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_z_on_jts() throws SQLException {
z( JTS );
}
@Test
public void test_z_on_geolatte() throws SQLException {
z( GEOLATTE );
}
public void z(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getZ();
String hql = format(
"SELECT id, z(geom) FROM %s where geometrytype(geom) in ('ST_Point') and srid(geom) = %d",
entityName( pckg ),
expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_zmax_on_jts() throws SQLException {
zmax( JTS );
}
@Test
public void test_zmax_on_geolatte() throws SQLException {
zmax( GEOLATTE );
}
public void zmax(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getZMax();
String hql = format(
"SELECT id, zmax(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_zmin_on_jts() throws SQLException {
zmin( JTS );
}
@Test
public void test_zmin_on_geolatte() throws SQLException {
zmin( GEOLATTE );
}
public void zmin(String pckg) throws SQLException {
Map<Integer, Double> dbexpected = hanaExpectationsFactory.getZMin();
String hql = format(
"SELECT id, zmin(geom) FROM %s where srid(geom) = %d",
entityName( pckg ), expectationsFactory.getTestSrid()
);
retrieveHQLResultsAndCompare( dbexpected, hql, pckg );
}
@Test
public void test_nestedfunction_on_jts() throws SQLException {
nestedfunction( JTS );
}
@Test
public void test_nestedfunction_on_geolatte() throws SQLException {
nestedfunction( GEOLATTE );
}
	// Verifies that spatial functions can be nested inside each other in HQL:
	// first with the nested srid() call on the bind parameter, then on the column.
	public void nestedfunction(String pckg) throws SQLException {
		Map<Integer, Geometry> dbexpected = hanaExpectationsFactory.getNestedFunctionInner( expectationsFactory.getTestPolygon() );
		String hql = format(
				"SELECT id, geom FROM %s g where dwithin(geom, srid(:filter, 0), 1) = true",
				entityName( pckg )
		);
		Map<String, Object> params = createQueryParams( "filter", expectationsFactory.getTestPolygon() );
		retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
		// Second round: same bind parameters, nesting flipped to the outer position.
		dbexpected = hanaExpectationsFactory.getNestedFunctionOuter( expectationsFactory.getTestPolygon() );
		hql = format(
				"SELECT id, geom FROM %s g where dwithin(:filter, srid(geom, 0), 1) = true",
				entityName( pckg )
		);
		retrieveHQLResultsAndCompare( dbexpected, hql, params, pckg );
	}
private String getGeometryTypeFromPackage(String pckg) {
switch ( pckg ) {
case GEOLATTE:
return org.geolatte.geom.Geometry.class.getName();
case JTS:
return Geometry.class.getName();
default:
throw new IllegalArgumentException( "Invalid package: " + pckg );
}
}
private Map<String, Object> createQueryParams(String filterParamName, Object value) {
Map<String, Object> params = new HashMap<String, Object>();
params.put( filterParamName, value );
return params;
}
	/**
	 * Convenience overload: runs the HQL query without bind parameters and
	 * compares the results against the expected values.
	 */
	public <T> void retrieveHQLResultsAndCompare(Map<Integer, T> dbexpected, String hql, String geometryType) {
		retrieveHQLResultsAndCompare( dbexpected, hql, null, geometryType );
	}
	/**
	 * Runs the HQL query (with optional bind parameters, may be null) in a
	 * throwaway session and compares the received id-to-value map against the
	 * expected one for the given geometry package.
	 */
	protected <T> void retrieveHQLResultsAndCompare(
			Map<Integer, T> dbexpected,
			String hql,
			Map<String, Object> params,
			String geometryType) {
		Map<Integer, T> hsreceived = new HashMap<Integer, T>();
		doInSession( hql, hsreceived, params );
		compare( dbexpected, hsreceived, geometryType );
	}
	// Executes the HQL query inside a transaction and collects the results into
	// the given map. The transaction is always rolled back: these are read-only
	// test queries and must leave no trace in the database.
	private <T> void doInSession(String hql, Map<Integer, T> result, Map<String, Object> params) {
		Session session = null;
		Transaction tx = null;
		try {
			session = openSession();
			tx = session.beginTransaction();
			Query query = session.createQuery( hql );
			setParameters( params, query );
			addQueryResults( result, query );
		}
		finally {
			// Roll back before closing; both guards cover a failure part-way
			// through the try block (e.g. openSession() itself throwing).
			if ( tx != null ) {
				tx.rollback();
			}
			if ( session != null ) {
				session.close();
			}
		}
	}
private void setParameters(Map<String, Object> params, Query query) {
if ( params == null ) {
return;
}
for ( Map.Entry<String, Object> entry : params.entrySet() ) {
query.setParameter( entry.getKey(), entry.getValue() );
}
}
}
|
apache/gora | 38,228 | gora-orientdb/src/main/java/org/apache/gora/orientdb/store/OrientDBStore.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gora.orientdb.store;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.Properties;
import java.util.List;
import java.util.HashMap;
import java.util.HashSet;
import java.util.ArrayList;
import java.util.Date;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.locks.ReentrantLock;
import java.util.TimeZone;
import java.util.Locale;
import com.github.raymanrt.orientqb.query.Parameter;
import com.gitub.raymanrt.orientqb.delete.Delete;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabasePool;
import com.orientechnologies.orient.core.db.ODatabaseSession;
import com.orientechnologies.orient.core.db.ODatabaseType;
import com.orientechnologies.orient.core.db.OPartitionedDatabasePool;
import com.orientechnologies.orient.core.db.OrientDB;
import com.orientechnologies.orient.core.db.OrientDBConfig;
import com.orientechnologies.orient.core.db.OrientDBConfigBuilder;
import com.orientechnologies.orient.core.db.record.OTrackedList;
import com.orientechnologies.orient.core.db.record.OTrackedMap;
import com.orientechnologies.orient.core.db.record.OTrackedSet;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.sql.query.OConcurrentLegacyResultSet;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
import org.apache.avro.Schema;
import org.apache.avro.util.Utf8;
import org.apache.gora.orientdb.query.OrientDBQuery;
import org.apache.gora.orientdb.query.OrientDBResult;
import org.apache.gora.persistency.impl.BeanFactoryImpl;
import org.apache.gora.persistency.impl.DirtyListWrapper;
import org.apache.gora.persistency.impl.DirtyMapWrapper;
import org.apache.gora.persistency.impl.PersistentBase;
import org.apache.gora.query.PartitionQuery;
import org.apache.gora.query.Query;
import org.apache.gora.query.Result;
import org.apache.gora.query.impl.PartitionQueryImpl;
import org.apache.gora.store.impl.DataStoreBase;
import org.apache.gora.util.AvroUtils;
import org.apache.gora.util.ClassLoadingUtils;
import org.apache.gora.util.GoraException;
import javax.xml.bind.DatatypeConverter;
import static com.github.raymanrt.orientqb.query.Projection.projection;
/**
* {@inheritDoc}
* {@link org.apache.gora.orientdb.store.OrientDBStore} is the primary class
* responsible for facilitating GORA CRUD operations on OrientDB documents.
*/
public class OrientDBStore<K, T extends PersistentBase> extends DataStoreBase<K, T> {
public static final String DEFAULT_MAPPING_FILE = "/gora-orientdb-mapping.xml";
private String ROOT_URL;
private String ROOT_DATABASE_URL;
private OrientDBStoreParameters orientDbStoreParams;
private OrientDBMapping orientDBMapping;
private OrientDB remoteServerAdmin;
private ODatabasePool connectionPool;
private List<ODocument> docBatch = Collections.synchronizedList(new ArrayList<>());
private ReentrantLock flushLock = new ReentrantLock();
private int DEFAULT_DB_POOL_MIN_SIZE = 5;
private int DEFAULT_DB_POOL_MAX_SIZE = 10;
/**
* {@inheritDoc}
* Initialize the OrientDB dataStore by {@link Properties} parameters.
*
* @param keyClass key class type for dataStore.
* @param persistentClass persistent class type for dataStore.
* @param properties OrientDB dataStore properties EG:- OrientDB client credentials.
*/
@Override
public void initialize(Class<K> keyClass, Class<T> persistentClass, Properties properties) throws GoraException {
super.initialize(keyClass, persistentClass, properties);
try {
orientDbStoreParams = OrientDBStoreParameters.load(properties);
ROOT_URL = "remote:".concat(orientDbStoreParams.getServerHost()).concat(":")
.concat(orientDbStoreParams.getServerPort());
ROOT_DATABASE_URL = ROOT_URL.concat("/").concat(orientDbStoreParams.getDatabaseName());
remoteServerAdmin = new OrientDB(ROOT_URL, orientDbStoreParams.getUserName(),
orientDbStoreParams.getUserPassword(), OrientDBConfig.defaultConfig());
if (!remoteServerAdmin.exists(orientDbStoreParams.getDatabaseName())) {
remoteServerAdmin.create(orientDbStoreParams.getDatabaseName(),
ODatabaseType.valueOf(orientDbStoreParams.getStorageType().toUpperCase(Locale.ENGLISH)));
}
if (orientDbStoreParams.getConnectionPoolMinSize() != null &&
orientDbStoreParams.getConnectionPoolMaxSize() != null) {
OrientDBConfigBuilder poolCfg = OrientDBConfig.builder();
poolCfg.addConfig(OGlobalConfiguration.DB_POOL_MIN,
orientDbStoreParams.getConnectionPoolMinSize());
poolCfg.addConfig(OGlobalConfiguration.DB_POOL_MAX,
orientDbStoreParams.getConnectionPoolMaxSize());
connectionPool = new ODatabasePool(remoteServerAdmin,
orientDbStoreParams.getDatabaseName(),
orientDbStoreParams.getUserName(),
orientDbStoreParams.getUserPassword(), poolCfg.build());
} else {
OrientDBConfigBuilder poolCfg = OrientDBConfig.builder();
poolCfg.addConfig(OGlobalConfiguration.DB_POOL_MIN,
DEFAULT_DB_POOL_MIN_SIZE);
poolCfg.addConfig(OGlobalConfiguration.DB_POOL_MAX,
DEFAULT_DB_POOL_MAX_SIZE);
connectionPool = new ODatabasePool(remoteServerAdmin,
orientDbStoreParams.getDatabaseName(),
orientDbStoreParams.getUserName(),
orientDbStoreParams.getUserPassword(), poolCfg.build());
}
OrientDBMappingBuilder<K, T> builder = new OrientDBMappingBuilder<>(this);
orientDBMapping = builder.fromFile(orientDbStoreParams.getMappingFile()).build();
if (!schemaExists()) {
createSchema();
}
} catch (Exception e) {
LOG.error("Error while initializing OrientDB dataStore: {}",
new Object[]{e.getMessage()});
throw new RuntimeException(e);
}
}
  /**
   * {@inheritDoc}
   * Delegates to the base class to resolve the effective schema name from the
   * mapping-file name and the persistent class.
   */
  @Override
  public String getSchemaName(final String mappingSchemaName,
                              final Class<?> persistentClass) {
    return super.getSchemaName(mappingSchemaName, persistentClass);
  }
  /**
   * {@inheritDoc}
   * The schema name is the OrientDB document class configured in the mapping.
   */
  @Override
  public String getSchemaName() {
    return orientDBMapping.getDocumentClass();
  }
  /**
   * {@inheritDoc}
   * Create a new class of OrientDB documents if necessary. Enforce specified schema over the document class.
   * The key is stored as the {@code _id} property with a unique index; every
   * mapped document field gets a typed property.
   *
   * @throws GoraException if the schema cannot be created.
   */
  @Override
  public void createSchema() throws GoraException {
    if (schemaExists()) {
      return;
    }
    try (ODatabaseSession schemaTx = connectionPool.acquire()) {
      schemaTx.activateOnCurrentThread();
      OClass documentClass = schemaTx.getMetadata().getSchema().createClass(orientDBMapping.getDocumentClass());
      documentClass.createProperty("_id",
              OType.getTypeByClass(super.getKeyClass())).createIndex(OClass.INDEX_TYPE.UNIQUE);
      for (String docField : orientDBMapping.getDocumentFields()) {
        documentClass.createProperty(docField,
                OType.valueOf(orientDBMapping.getDocumentFieldType(docField).name()));
      }
      // Reload so the freshly created class is visible to subsequent sessions.
      schemaTx.getMetadata().getSchema().reload();
    } catch (Exception e) {
      throw new GoraException(e);
    }
  }
/**
* {@inheritDoc}
* Deletes enforced schema over OrientDB Document class.
*/
@Override
public void deleteSchema() throws GoraException {
if (!schemaExists()) {
return;
}
try (ODatabaseSession schemaTx = connectionPool.acquire()) {
schemaTx.activateOnCurrentThread();
schemaTx.getMetadata().getSchema().dropClass(orientDBMapping.getDocumentClass());
} catch (Exception e) {
throw new GoraException(e);
}
}
  /**
   * {@inheritDoc}
   * Check whether there exist a schema enforced over OrientDB document class.
   *
   * @return true if the mapped document class exists in the database schema.
   * @throws GoraException if the schema lookup fails.
   */
  @Override
  public boolean schemaExists() throws GoraException {
    try (ODatabaseSession schemaTx = connectionPool.acquire()) {
      schemaTx.activateOnCurrentThread();
      return schemaTx.getMetadata().getSchema()
              .existsClass(orientDBMapping.getDocumentClass());
    } catch (Exception e) {
      throw new GoraException(e);
    }
  }
  /**
   * {@inheritDoc}
   * Fetches the persistent bean for the given key, selecting only the mapped
   * document fields corresponding to the requested Avro fields (all fields
   * when {@code fields} is null).
   *
   * @return the populated bean, or null when no document matches the key.
   * @throws GoraException if the query fails.
   */
  @Override
  public T get(K key, String[] fields) throws GoraException {
    String[] dbFields = getFieldsToQuery(fields);
    com.github.raymanrt.orientqb.query.Query selectQuery = new com.github.raymanrt.orientqb.query.Query();
    for (String k : dbFields) {
      String dbFieldName = orientDBMapping.getDocumentField(k);
      // Skip Avro fields that have no document-field mapping.
      if (dbFieldName != null && dbFieldName.length() > 0) {
        selectQuery.select(dbFieldName);
      }
    }
    selectQuery.from(orientDBMapping.getDocumentClass())
            .where(projection("_id").eq(Parameter.parameter("key")));
    Map<String, Object> params = new HashMap<String, Object>();
    params.put("key", key);
    OSQLSynchQuery<ODocument> query = new OSQLSynchQuery<ODocument>(selectQuery.toString());
    try (ODatabaseSession selectTx = connectionPool.acquire()) {
      selectTx.activateOnCurrentThread();
      List<ODocument> result = selectTx.command(query).execute(params);
      // _id carries a unique index, so at most one document can match.
      if (result.size() == 1) {
        return convertOrientDocToAvroBean(result.get(0), dbFields);
      } else {
        return null;
      }
    } catch (Exception e) {
      throw new GoraException(e);
    }
  }
/**
* {@inheritDoc}
*/
@Override
public void put(K key, T val) throws GoraException {
if (val.isDirty()) {
OrientDBQuery<K, T> dataStoreQuery = new OrientDBQuery<>(this);
dataStoreQuery.setStartKey(key);
dataStoreQuery.setEndKey(key);
dataStoreQuery.populateOrientDBQuery(orientDBMapping, getFieldsToQuery(null), getFields());
try (ODatabaseSession selectTx = connectionPool.acquire()) {
selectTx.activateOnCurrentThread();
// TODO : further optimize for queries to separate cases update / insert == get rid of select all query
// TODO : for update
List<ODocument> result = selectTx.command(dataStoreQuery.getOrientDBQuery())
.execute(dataStoreQuery.getParams());
if (result.size() == 1) {
ODocument document = updateOrientDocFromAvroBean(key, val, result.get(0));
docBatch.add(document);
} else {
ODocument document = convertAvroBeanToOrientDoc(key, val);
docBatch.add(document);
}
} catch (Exception e) {
throw new GoraException(e);
}
} else {
if (LOG.isDebugEnabled()) {
LOG.info("Ignored putting persistent bean {} in the store as it is neither "
+ "new, neither dirty.", new Object[]{val});
}
}
}
/**
* {@inheritDoc}
*/
@Override
public boolean delete(K key) throws GoraException {
Delete delete = new Delete();
delete.from(orientDBMapping.getDocumentClass())
.where(projection("_id").eq(Parameter.parameter("key")));
Map<String, Object> params = new HashMap<String, Object>();
params.put("key", key);
OCommandSQL query = new OCommandSQL(delete.toString().replace("DELETE", "DELETE FROM"));
try (ODatabaseSession deleteTx = connectionPool.acquire()) {
deleteTx.activateOnCurrentThread();
int deleteCount = deleteTx.command(query).execute(params);
if (deleteCount == 1) {
return true;
} else {
return false;
}
} catch (Exception e) {
throw new GoraException(e);
}
}
  /**
   * {@inheritDoc}
   * Deletes data matching the key range of the given query, in one of two modes:
   * when the query covers all fields (or none were specified), whole documents
   * are deleted; when only a subset of fields is given, matching documents are
   * kept but the selected fields are removed from each of them.
   *
   * @return the number of documents deleted (or field-stripped); 0 when none matched.
   */
  @Override
  public long deleteByQuery(Query<K, T> query) throws GoraException {
    Delete delete = new Delete();
    delete.from(orientDBMapping.getDocumentClass());
    Map<String, Object> params = new HashMap<String, Object>();
    // Full-document delete: no field projection, or the projection covers every field.
    if (query.getFields() == null || (query.getFields().length == getFields().length)) {
      // Key range bounds are optional; an unbounded query deletes everything.
      if (query.getStartKey() != null) {
        delete.where(projection("_id").ge(Parameter.parameter("start")));
        params.put("start", query.getStartKey());
      }
      if (query.getEndKey() != null) {
        delete.where(projection("_id").le(Parameter.parameter("end")));
        params.put("end", query.getEndKey());
      }
      // orientqb emits "DELETE"; OrientDB SQL requires "DELETE FROM".
      OCommandSQL dbQuery = new OCommandSQL(delete.toString().replace("DELETE", "DELETE FROM"));
      try (ODatabaseSession deleteTx = connectionPool.acquire()) {
        deleteTx.activateOnCurrentThread();
        int deleteCount;
        if (params.isEmpty()) {
          deleteCount = deleteTx.command(dbQuery).execute();
        } else {
          deleteCount = deleteTx.command(dbQuery).execute(params);
        }
        if (deleteCount > 0) {
          return deleteCount;
        } else {
          return 0;
        }
      } catch (Exception e) {
        throw new GoraException(e);
      }
    } else {
      // Partial delete: select the matching documents, strip only the queried
      // fields from each, and save the trimmed documents back.
      OrientDBQuery<K, T> dataStoreQuery = new OrientDBQuery<>(this);
      dataStoreQuery.setStartKey(query.getStartKey());
      dataStoreQuery.setEndKey(query.getEndKey());
      dataStoreQuery.populateOrientDBQuery(orientDBMapping, getFieldsToQuery(null), getFields());
      try (ODatabaseSession selectTx = connectionPool.acquire()) {
        selectTx.activateOnCurrentThread();
        List<ODocument> result = selectTx.command(dataStoreQuery.getOrientDBQuery())
            .execute(dataStoreQuery.getParams());
        if (result != null && result.isEmpty()) {
          return 0;
        } else {
          for (ODocument doc : result) {
            for (String docField : query.getFields()) {
              if (doc.containsField(orientDBMapping.getDocumentField(docField))) {
                doc.removeField(orientDBMapping.getDocumentField(docField));
              }
            }
            // Persist the document immediately (bypasses the docBatch buffer).
            doc.save();
          }
          return result.size();
        }
      } catch (Exception e) {
        throw new GoraException(e);
      }
    }
  }
/**
* {@inheritDoc}
*/
@Override
public Result<K, T> execute(Query<K, T> query) throws GoraException {
String[] fields = getFieldsToQuery(query.getFields());
OrientDBQuery dataStoreQuery;
if (query instanceof OrientDBQuery) {
dataStoreQuery = ((OrientDBQuery) query);
} else {
dataStoreQuery = (OrientDBQuery) ((PartitionQueryImpl<K, T>) query).getBaseQuery();
}
dataStoreQuery.populateOrientDBQuery(orientDBMapping, fields, getFields());
try (ODatabaseSession selectTx = connectionPool.acquire()) {
selectTx.activateOnCurrentThread();
OConcurrentLegacyResultSet<ODocument> result = selectTx.command(dataStoreQuery.getOrientDBQuery())
.execute(dataStoreQuery.getParams());
result.setLimit((int) query.getLimit());
return new OrientDBResult<K, T>(this, query, result);
} catch (Exception e) {
throw new GoraException(e);
}
}
/**
* {@inheritDoc}
*/
@Override
public Query<K, T> newQuery() {
OrientDBQuery<K, T> query = new OrientDBQuery<K, T>(this);
query.setFields(getFieldsToQuery(null));
return new OrientDBQuery<K, T>(this);
}
/**
* {@inheritDoc}
*/
@Override
public List<PartitionQuery<K, T>> getPartitions(Query<K, T> query) throws IOException {
// TODO : Improve code on OrientDB clusters
List<PartitionQuery<K, T>> partitions = new ArrayList<>();
PartitionQueryImpl<K, T> partitionQuery = new PartitionQueryImpl<>(
query);
partitionQuery.setConf(this.getConf());
partitions.add(partitionQuery);
return partitions;
}
/**
* {@inheritDoc}
* Flushes locally cached to content in memory to remote OrientDB server.
*/
@Override
public void flush() throws GoraException {
try (ODatabaseSession updateTx = connectionPool.acquire()) {
updateTx.activateOnCurrentThread();
flushLock.lock();
for (ODocument document : docBatch) {
updateTx.save(document);
}
} catch (Exception e) {
throw new GoraException(e);
} finally {
docBatch.clear();
flushLock.unlock();
}
}
/**
* {@inheritDoc}
* Releases resources which have been used dataStore. Eg:- OrientDB Client connection pool.
*/
@Override
public void close() {
try {
flush();
} catch (Exception ex) {
LOG.error("Error occurred while flushing data to OrientDB : ", ex);
}
docBatch.clear();
remoteServerAdmin.close();
connectionPool.close();
}
  /**
   * Returns the OrientDB client connection pool maintained by this Gora
   * dataStore. Sessions acquired from it must be closed by the caller.
   *
   * @return {@link ODatabasePool} OrientDB client connection pool.
   */
  public ODatabasePool getConnectionPool() {
    return connectionPool;
  }
  /**
   * Builds a new persistent Avro bean from an OrientDB document.
   *
   * @param obj    source OrientDB document.
   * @param fields Avro field names to load; null means all queryable fields.
   * @return a fresh persistent bean populated with the loaded fields,
   *         with its dirty flags cleared.
   * @throws GoraException when a nested record conversion fails.
   */
  public T convertOrientDocToAvroBean(final ODocument obj, final String[] fields) throws GoraException {
    T persistent = newPersistent();
    String[] dbFields = getFieldsToQuery(fields);
    for (String f : dbFields) {
      String docf = orientDBMapping.getDocumentField(f);
      // Skip Avro fields with no document mapping or absent from this document.
      if (docf == null || !obj.containsField(docf))
        continue;
      OrientDBMapping.DocumentFieldType storeType = orientDBMapping.getDocumentFieldType(docf);
      Schema.Field field = fieldMap.get(f);
      Schema fieldSchema = field.schema();
      LOG.debug("Load from ODocument, field:{}, schemaType:{}, docField:{}, storeType:{}",
          new Object[]{field.name(), fieldSchema.getType(), docf, storeType});
      Object result = convertDocFieldToAvroField(fieldSchema, storeType, field, docf, obj);
      persistent.put(field.pos(), result);
    }
    // Freshly loaded beans must not appear dirty, or put() would rewrite them.
    persistent.clearDirty();
    return persistent;
  }
  /**
   * Converts a single OrientDB document field into its Avro representation,
   * dispatching on the Avro schema type of the target field.
   *
   * @param fieldSchema Avro schema of the target field.
   * @param storeType   OrientDB storage type the field is mapped to.
   * @param field       Avro field descriptor, passed through for nested conversions.
   * @param docf        document field name to read from {@code obj}.
   * @param obj         source OrientDB document.
   * @return the Avro-typed value; null for NULL schema, absent values, or
   *         unknown schema types (which are logged and skipped).
   * @throws GoraException when a nested RECORD cannot be converted.
   */
  private Object convertDocFieldToAvroField(final Schema fieldSchema,
                                            final OrientDBMapping.DocumentFieldType storeType,
                                            final Schema.Field field,
                                            final String docf,
                                            final ODocument obj) throws GoraException {
    Object result = null;
    switch (fieldSchema.getType()) {
      case MAP:
        result = convertDocFieldToAvroMap(docf, fieldSchema, obj, field, storeType);
        break;
      case ARRAY:
        result = convertDocFieldToAvroList(docf, fieldSchema, obj, field, storeType);
        break;
      case RECORD:
        // Nested records are stored as embedded ODocuments.
        ODocument record = obj.field(docf);
        if (record == null) {
          result = null;
          break;
        }
        result = convertAvroBeanToOrientDoc(fieldSchema, record);
        break;
      // Scalars are coerced through OType.convert to the matching Java box type.
      case BOOLEAN:
        result = OType.convert(obj.field(docf), Boolean.class);
        break;
      case DOUBLE:
        result = OType.convert(obj.field(docf), Double.class);
        break;
      case FLOAT:
        result = OType.convert(obj.field(docf), Float.class);
        break;
      case INT:
        result = OType.convert(obj.field(docf), Integer.class);
        break;
      case LONG:
        result = OType.convert(obj.field(docf), Long.class);
        break;
      case STRING:
        result = convertDocFieldToAvroString(storeType, docf, obj);
        break;
      case ENUM:
        result = AvroUtils.getEnumValue(fieldSchema, obj.field(docf));
        break;
      case BYTES:
      case FIXED:
        // Byte data is rewrapped in a ByteBuffer; absent values map to null.
        if (obj.field(docf) == null) {
          result = null;
          break;
        }
        result = ByteBuffer.wrap((byte[]) obj.field(docf));
        break;
      case NULL:
        result = null;
        break;
      case UNION:
        result = convertDocFieldToAvroUnion(fieldSchema, storeType, field, docf, obj);
        break;
      default:
        LOG.warn("Unable to read {}", docf);
        break;
    }
    return result;
  }
private Object convertDocFieldToAvroList(final String docf,
final Schema fieldSchema,
final ODocument doc,
final Schema.Field f,
final OrientDBMapping.DocumentFieldType storeType) throws GoraException {
if (storeType == OrientDBMapping.DocumentFieldType.EMBEDDEDSET) {
OTrackedSet<Object> set = doc.field(docf);
List<Object> rlist = new ArrayList<>();
if (set == null) {
return new DirtyListWrapper(rlist);
}
for (Object item : set) {
Object o = convertDocFieldToAvroField(fieldSchema.getElementType(), storeType, f,
"item", new ODocument("item", item));
rlist.add(o);
}
return new DirtyListWrapper<>(rlist);
} else {
OTrackedList<Object> list = doc.field(docf);
List<Object> rlist = new ArrayList<>();
if (list == null) {
return new DirtyListWrapper(rlist);
}
for (Object item : list) {
Object o = convertDocFieldToAvroField(fieldSchema.getElementType(), storeType, f,
"item", new ODocument("item", item));
rlist.add(o);
}
return new DirtyListWrapper<>(rlist);
}
}
private Object convertAvroListToDocField(final String docf, final Collection<?> array,
final Schema fieldSchema, final Schema.Type fieldType,
final OrientDBMapping.DocumentFieldType storeType) {
if (storeType == OrientDBMapping.DocumentFieldType.EMBEDDEDLIST) {
ArrayList list;
list = new ArrayList<Object>();
if (array == null)
return list;
for (Object item : array) {
OrientDBMapping.DocumentFieldType fieldStoreType = orientDBMapping.getDocumentFieldType(docf);
Object result = convertAvroFieldToOrientField(docf, fieldSchema, fieldType, fieldStoreType, item);
list.add(result);
}
return list;
} else if (storeType == OrientDBMapping.DocumentFieldType.EMBEDDEDSET) {
HashSet set;
set = new HashSet<Object>();
if (array == null)
return set;
for (Object item : array) {
OrientDBMapping.DocumentFieldType fieldStoreType = orientDBMapping.getDocumentFieldType(docf);
Object result = convertAvroFieldToOrientField(docf, fieldSchema, fieldType, fieldStoreType, item);
set.add(result);
}
return set;
}
return null;
}
  /**
   * Converts an OrientDB map field into an Avro map wrapped in a
   * {@link DirtyMapWrapper}. EMBEDDEDMAP mappings are read as OTrackedMap;
   * other mappings are read as a nested ODocument whose field names become
   * map keys. Keys are decoded with {@link #decodeFieldKey(String)}.
   *
   * @param docf        document field name holding the map.
   * @param fieldSchema Avro MAP schema (value type drives the conversion).
   * @param doc         source document.
   * @param f           Avro field descriptor, passed through for nested conversions.
   * @param storeType   mapped OrientDB storage type.
   * @throws GoraException when a value conversion fails.
   */
  private Object convertDocFieldToAvroMap(final String docf, final Schema fieldSchema,
                                          final ODocument doc, final Schema.Field f,
                                          final OrientDBMapping.DocumentFieldType storeType) throws GoraException {
    if (storeType == OrientDBMapping.DocumentFieldType.EMBEDDEDMAP) {
      OTrackedMap<Object> map = doc.field(docf);
      Map<Utf8, Object> rmap = new HashMap<>();
      // A missing stored map yields an empty wrapped map.
      if (map == null) {
        return new DirtyMapWrapper(rmap);
      }
      for (Map.Entry entry : map.entrySet()) {
        String mapKey = decodeFieldKey((String) entry.getKey());
        // The tracked map is re-exposed as a transient ODocument so the
        // generic field converter can be reused for the value.
        Object o = convertDocFieldToAvroField(fieldSchema.getValueType(), storeType, f, mapKey,
            decorateOTrackedMapToODoc(map));
        rmap.put(new Utf8(mapKey), o);
      }
      return new DirtyMapWrapper<>(rmap);
    } else {
      ODocument innerDoc = doc.field(docf);
      Map<Utf8, Object> rmap = new HashMap<>();
      if (innerDoc == null) {
        return new DirtyMapWrapper(rmap);
      }
      for (String fieldName : innerDoc.fieldNames()) {
        String mapKey = decodeFieldKey(fieldName);
        Object o = convertDocFieldToAvroField(fieldSchema.getValueType(), storeType, f, mapKey,
            innerDoc);
        rmap.put(new Utf8(mapKey), o);
      }
      return new DirtyMapWrapper<>(rmap);
    }
  }
private ODocument decorateOTrackedMapToODoc(OTrackedMap<Object> map) {
ODocument doc = new ODocument();
for (Map.Entry entry : map.entrySet()) {
doc.field((String) entry.getKey(), entry.getValue());
}
return doc;
}
  /**
   * Converts an Avro map into its OrientDB form: a plain HashMap for
   * EMBEDDEDMAP mappings, otherwise a nested ODocument named "map"+docf.
   * Keys are escaped with {@link #encodeFieldKey(String)} before storage.
   * A null Avro map yields an empty container.
   *
   * @param docf        target document field name.
   * @param value       Avro map to convert (may be null).
   * @param fieldSchema Avro value schema.
   * @param fieldType   Avro value schema type.
   * @param storeType   mapped OrientDB storage type.
   */
  private Object convertAvroMapToDocField(final String docf,
                                          final Map<CharSequence, ?> value, final Schema fieldSchema,
                                          final Schema.Type fieldType,
                                          final OrientDBMapping.DocumentFieldType storeType) {
    if (storeType == OrientDBMapping.DocumentFieldType.EMBEDDEDMAP) {
      HashMap map = new HashMap<String, Object>();
      if (value == null)
        return map;
      for (Map.Entry<CharSequence, ?> e : value.entrySet()) {
        String mapKey = encodeFieldKey(e.getKey().toString());
        Object mapValue = e.getValue();
        OrientDBMapping.DocumentFieldType fieldStoreType = orientDBMapping.getDocumentFieldType(docf);
        Object result = convertAvroFieldToOrientField(docf, fieldSchema, fieldType, fieldStoreType,
            mapValue);
        map.put(mapKey, result);
      }
      return map;
    } else {
      ODocument doc = new ODocument("map" + docf);
      if (value == null)
        return doc;
      for (Map.Entry<CharSequence, ?> e : value.entrySet()) {
        String mapKey = encodeFieldKey(e.getKey().toString());
        Object mapValue = e.getValue();
        OrientDBMapping.DocumentFieldType fieldStoreType = orientDBMapping.getDocumentFieldType(docf);
        Object result = convertAvroFieldToOrientField(docf, fieldSchema, fieldType, fieldStoreType,
            mapValue);
        doc.field(mapKey, result);
      }
      return doc;
    }
  }
  /**
   * Rebuilds a persistent Avro RECORD bean from a nested ODocument.
   * NOTE(review): despite the name, this converts ODocument -> Avro bean
   * (it is the read-path counterpart of the write-path overloads).
   *
   * @param fieldSchema Avro RECORD schema; its full name is loaded as the bean class.
   * @param doc         nested document holding the record's fields.
   * @return the populated PersistentBase instance.
   * @throws GoraException when the record class cannot be loaded or a field
   *         conversion fails.
   */
  private Object convertAvroBeanToOrientDoc(final Schema fieldSchema,
                                            final ODocument doc) throws GoraException {
    Object result;
    Class<?> clazz = null;
    try {
      clazz = ClassLoadingUtils.loadClass(fieldSchema.getFullName());
    } catch (Exception e) {
      throw new GoraException(e);
    }
    PersistentBase record = (PersistentBase) new BeanFactoryImpl(keyClass, clazz).newPersistent();
    for (Schema.Field recField : fieldSchema.getFields()) {
      Schema innerSchema = recField.schema();
      OrientDBMapping.DocumentFieldType innerStoreType = orientDBMapping
          .getDocumentFieldType(recField.name());
      // Fall back to the Avro field name when no document mapping exists.
      String innerDocField = orientDBMapping.getDocumentField(recField.name()) != null ? orientDBMapping
          .getDocumentField(recField.name()) : recField.name();
      LOG.debug("Load from ODocument (RECORD), field:{}, schemaType:{}, docField:{}, storeType:{}",
          new Object[]{recField.name(), innerSchema.getType(), innerDocField,
              innerStoreType});
      record.put(recField.pos(),
          convertDocFieldToAvroField(innerSchema, innerStoreType, recField, innerDocField,
              doc));
    }
    result = record;
    return result;
  }
  /**
   * Reads a document field as an Avro string ({@link Utf8}). DATE/DATETIME
   * mapped fields are rendered as an ISO-8601 timestamp in UTC via
   * DatatypeConverter; all other fields are read as a plain string.
   */
  private Object convertDocFieldToAvroString(final OrientDBMapping.DocumentFieldType storeType,
                                             final String docf, final ODocument doc) {
    Object result;
    if (storeType == OrientDBMapping.DocumentFieldType.DATE ||
        storeType == OrientDBMapping.DocumentFieldType.DATETIME) {
      Date dateTime = doc.field(docf);
      Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.getDefault());
      calendar.setTime(dateTime);
      result = new Utf8(DatatypeConverter.printDateTime(calendar));
    } else {
      // NOTE(review): this branch reads encodeFieldKey(docf) while the date
      // branch reads docf directly — confirm the asymmetry is intended.
      // NOTE(review): an absent/null field value causes a NullPointerException
      // in the Utf8 constructor — verify callers guard against missing fields.
      result = new Utf8((String) doc.field(encodeFieldKey(docf)));
    }
    return result;
  }
  /**
   * Reads a UNION-typed field. Only two-branch unions where exactly one
   * branch is NULL (i.e. nullable fields) are supported; the non-NULL branch
   * is used as the effective schema for the conversion.
   *
   * @throws GoraException for any other union shape, or when the inner
   *         conversion fails.
   */
  private Object convertDocFieldToAvroUnion(final Schema fieldSchema,
                                            final OrientDBMapping.DocumentFieldType storeType,
                                            final Schema.Field field,
                                            final String docf,
                                            final ODocument doc) throws GoraException {
    Object result;
    Schema.Type type0 = fieldSchema.getTypes().get(0).getType();
    Schema.Type type1 = fieldSchema.getTypes().get(1).getType();
    // Accept only [NULL, X] or [X, NULL] unions.
    if (!type0.equals(type1)
        && (type0.equals(Schema.Type.NULL) || type1.equals(Schema.Type.NULL))) {
      Schema innerSchema = null;
      if (type0.equals(Schema.Type.NULL)) {
        innerSchema = fieldSchema.getTypes().get(1);
      } else {
        innerSchema = fieldSchema.getTypes().get(0);
      }
      LOG.debug("Load from ODocument (UNION), schemaType:{}, docField:{}, storeType:{}",
          new Object[]{innerSchema.getType(), docf, storeType});
      result = convertDocFieldToAvroField(innerSchema, storeType, field, docf, doc);
    } else {
      throw new GoraException("OrientDBStore only supports Union of two types field.");
    }
    return result;
  }
private Object convertAvroUnionToOrientDBField(final String docf, final Schema fieldSchema,
final OrientDBMapping.DocumentFieldType storeType,
final Object value) {
Object result;
Schema.Type type0 = fieldSchema.getTypes().get(0).getType();
Schema.Type type1 = fieldSchema.getTypes().get(1).getType();
if (!type0.equals(type1)
&& (type0.equals(Schema.Type.NULL) || type1.equals(Schema.Type.NULL))) {
Schema innerSchema = null;
if (type0.equals(Schema.Type.NULL)) {
innerSchema = fieldSchema.getTypes().get(1);
} else {
innerSchema = fieldSchema.getTypes().get(0);
}
LOG.debug("Transform value to ODocument (UNION), type:{}, storeType:{}",
new Object[]{innerSchema.getType(), type1, storeType});
result = convertAvroFieldToOrientField(docf, innerSchema, innerSchema.getType(), storeType, value);
} else {
throw new IllegalStateException("OrientDBStore only supports Union of two types field.");
}
return result;
}
  /**
   * Builds a brand-new ODocument from a persistent bean for insertion,
   * writing every dirty, non-null field plus the key under "_id".
   * Used by put() when the key is not yet stored; updates go through
   * updateOrientDocFromAvroBean instead.
   */
  private ODocument convertAvroBeanToOrientDoc(final K key, final T persistent) {
    ODocument result = new ODocument(orientDBMapping.getDocumentClass());
    for (Schema.Field f : persistent.getSchema().getFields()) {
      // Only dirty, non-null fields are written on the insert path.
      if (persistent.isDirty(f.pos()) && (persistent.get(f.pos()) != null)) {
        String docf = orientDBMapping.getDocumentField(f.name());
        Object value = persistent.get(f.pos());
        OrientDBMapping.DocumentFieldType storeType = orientDBMapping.getDocumentFieldType(docf);
        LOG.debug("Transform value to ODocument, docField:{}, schemaType:{}, storeType:{}",
            new Object[]{docf, f.schema().getType(), storeType});
        Object o = convertAvroFieldToOrientField(docf, f.schema(), f.schema().getType(),
            storeType, value);
        result.field(docf, o);
      }
    }
    result.field("_id", key);
    return result;
  }
  /**
   * Applies the dirty fields of a persistent bean onto an already-stored
   * document: a dirty field whose value is null is removed from the document,
   * other dirty fields are overwritten. Clean fields are left untouched.
   *
   * @param key        bean key (unused here; the document already carries "_id").
   * @param persistent bean whose dirty fields are applied.
   * @param result     the stored document to mutate and return.
   */
  private ODocument updateOrientDocFromAvroBean(final K key, final T persistent, final ODocument result) {
    for (Schema.Field f : persistent.getSchema().getFields()) {
      if (persistent.isDirty(f.pos()) /*&& (persistent.get(f.pos()) != null)*/) {
        String docf = orientDBMapping.getDocumentField(f.name());
        // Dirty + null means the field was explicitly cleared: drop it.
        if (persistent.get(f.pos()) == null) {
          result.removeField(docf);
          continue;
        }
        Object value = persistent.get(f.pos());
        OrientDBMapping.DocumentFieldType storeType = orientDBMapping.getDocumentFieldType(docf);
        LOG.debug("Transform value to ODocument, docField:{}, schemaType:{}, storeType:{}",
            new Object[]{docf, f.schema().getType(), storeType});
        Object o = convertAvroFieldToOrientField(docf, f.schema(), f.schema().getType(),
            storeType, value);
        result.field(docf, o);
      }
    }
    return result;
  }
private Object convertAvroFieldToOrientField(final String docf, final Schema fieldSchema,
final Schema.Type fieldType,
final OrientDBMapping.DocumentFieldType storeType,
final Object value) {
Object result = null;
switch (fieldType) {
case MAP:
if (storeType != null && !(storeType == OrientDBMapping.DocumentFieldType.EMBEDDEDMAP ||
storeType == OrientDBMapping.DocumentFieldType.EMBEDDED)) {
throw new IllegalStateException(
"Field " + fieldSchema.getName()
+ ": to store a AVRO 'map', target OrientDB mapping have to be of type 'EmbeddedMap'" +
"| 'Embedded'");
}
Schema valueSchema = fieldSchema.getValueType();
result = convertAvroMapToDocField(docf, (Map<CharSequence, ?>) value, valueSchema,
valueSchema.getType(), storeType);
break;
case ARRAY:
if (storeType != null && !(storeType == OrientDBMapping.DocumentFieldType.EMBEDDEDLIST ||
storeType == OrientDBMapping.DocumentFieldType.EMBEDDEDSET)) {
throw new IllegalStateException("Field " + fieldSchema.getName()
+ ": To store a AVRO 'array', target Mongo mapping have to be of type 'EmbeddedMap'" +
"|'EmbeddedList'");
}
Schema elementSchema = fieldSchema.getElementType();
result = convertAvroListToDocField(docf, (List<?>) value, elementSchema,
elementSchema.getType(), storeType);
break;
case BYTES:
if (value != null) {
result = ((ByteBuffer) value).array();
}
break;
case INT:
case LONG:
case FLOAT:
case DOUBLE:
case BOOLEAN:
result = value;
break;
case STRING:
result = convertAvroStringToDocField(fieldSchema, storeType, value);
break;
case ENUM:
if (value != null)
result = value.toString();
break;
case RECORD:
if (value == null)
break;
result = convertAvroBeanToOrientDoc(docf, fieldSchema, value);
break;
case UNION:
result = convertAvroUnionToOrientDBField(docf, fieldSchema, storeType, value);
break;
case FIXED:
result = value;
break;
default:
LOG.error("Unknown field type: {}", fieldSchema.getType());
break;
}
return result;
}
private Object convertAvroStringToDocField(final Schema fieldSchema,
final OrientDBMapping.DocumentFieldType storeType,
final Object value) {
Object result = null;
if (storeType == OrientDBMapping.DocumentFieldType.DATETIME) {
if (value != null) {
Calendar dateTime = null;
try {
dateTime = DatatypeConverter.parseDateTime(value.toString());
} catch (IllegalArgumentException e) {
throw new IllegalStateException("Field " + fieldSchema.getType()
+ ": Invalid date and time format '" + value + "'", e);
}
result = dateTime.getTime();
}
} else if (storeType == OrientDBMapping.DocumentFieldType.DATE) {
Calendar date = null;
try {
date = DatatypeConverter.parseDate(value.toString());
} catch (IllegalArgumentException e) {
throw new IllegalStateException("Field " + fieldSchema.getType()
+ ": Invalid date format '" + value + "'", e);
}
result = date.getTime();
} else {
if (value != null) {
result = value.toString();
}
}
return result;
}
  /**
   * Converts a persistent Avro RECORD value into a nested ODocument named
   * "record"+docf (write path for RECORD-typed fields).
   */
  private ODocument convertAvroBeanToOrientDoc(final String docf,
                                               final Schema fieldSchema,
                                               final Object value) {
    ODocument record = new ODocument("record" + docf);
    for (Schema.Field member : fieldSchema.getFields()) {
      Object innerValue = ((PersistentBase) value).get(member.pos());
      String innerDoc = orientDBMapping.getDocumentField(member.name());
      Schema.Type innerType = member.schema().getType();
      OrientDBMapping.DocumentFieldType innerStoreType = orientDBMapping.getDocumentFieldType(innerDoc);
      LOG.debug("Transform value to ODocument , docField:{}, schemaType:{}, storeType:{}",
          new Object[]{member.name(), member.schema().getType(),
              innerStoreType});
      // NOTE(review): the recursive call passes the outer docf (not innerDoc),
      // and the converted value is stored under member.name() rather than the
      // mapped innerDoc name — confirm both are intended.
      Object fieldValue = convertAvroFieldToOrientField(docf, member.schema()
          , innerType, innerStoreType, innerValue);
      record.field(member.name(), fieldValue);
    }
    return record;
  }
private String encodeFieldKey(final String key) {
if (key == null) {
return null;
}
return key.replace(".", "\u00B7")
.replace(":", "\u00FF")
.replace(";", "\u00FE")
.replace(" ", "\u00FD")
.replace("%", "\u00FC")
.replace("=", "\u00FB");
}
private String decodeFieldKey(final String key) {
if (key == null) {
return null;
}
return key.replace("\u00B7", ".")
.replace("\u00FF", ":")
.replace("\u00FE", ";")
.replace("\u00FD", " ")
.replace("\u00FC", "%")
.replace("\u00FB", "=");
}
@Override
public boolean exists(K key) throws GoraException {
return get(key) != null;
}
}
|
googleapis/google-cloud-java | 38,108 | java-scheduler/proto-google-cloud-scheduler-v1beta1/src/main/java/com/google/cloud/scheduler/v1beta1/PubsubTarget.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/scheduler/v1beta1/target.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.scheduler.v1beta1;
/**
*
*
* <pre>
* Pub/Sub target. The job will be delivered by publishing a message to
* the given Pub/Sub topic.
* </pre>
*
* Protobuf type {@code google.cloud.scheduler.v1beta1.PubsubTarget}
*/
public final class PubsubTarget extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.scheduler.v1beta1.PubsubTarget)
PubsubTargetOrBuilder {
private static final long serialVersionUID = 0L;
// Use PubsubTarget.newBuilder() to construct.
private PubsubTarget(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private PubsubTarget() {
topicName_ = "";
data_ = com.google.protobuf.ByteString.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new PubsubTarget();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.scheduler.v1beta1.TargetProto
.internal_static_google_cloud_scheduler_v1beta1_PubsubTarget_descriptor;
}
@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
int number) {
switch (number) {
case 4:
return internalGetAttributes();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.scheduler.v1beta1.TargetProto
.internal_static_google_cloud_scheduler_v1beta1_PubsubTarget_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.scheduler.v1beta1.PubsubTarget.class,
com.google.cloud.scheduler.v1beta1.PubsubTarget.Builder.class);
}
public static final int TOPIC_NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object topicName_ = "";
/**
*
*
* <pre>
* Required. The name of the Cloud Pub/Sub topic to which messages will
* be published when a job is delivered. The topic name must be in the
* same format as required by Pub/Sub's
* [PublishRequest.name](https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#publishrequest),
* for example `projects/PROJECT_ID/topics/TOPIC_ID`.
*
* The topic must be in the same project as the Cloud Scheduler job.
* </pre>
*
* <code>string topic_name = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The topicName.
*/
@java.lang.Override
public java.lang.String getTopicName() {
java.lang.Object ref = topicName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
topicName_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the Cloud Pub/Sub topic to which messages will
* be published when a job is delivered. The topic name must be in the
* same format as required by Pub/Sub's
* [PublishRequest.name](https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#publishrequest),
* for example `projects/PROJECT_ID/topics/TOPIC_ID`.
*
* The topic must be in the same project as the Cloud Scheduler job.
* </pre>
*
* <code>string topic_name = 1 [(.google.api.resource_reference) = { ... }</code>
*
* @return The bytes for topicName.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTopicNameBytes() {
java.lang.Object ref = topicName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
topicName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DATA_FIELD_NUMBER = 3;
private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
/**
*
*
* <pre>
* The message payload for PubsubMessage.
*
* Pubsub message must contain either non-empty data, or at least one
* attribute.
* </pre>
*
* <code>bytes data = 3;</code>
*
* @return The data.
*/
@java.lang.Override
public com.google.protobuf.ByteString getData() {
return data_;
}
public static final int ATTRIBUTES_FIELD_NUMBER = 4;
private static final class AttributesDefaultEntryHolder {
static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
com.google.cloud.scheduler.v1beta1.TargetProto
.internal_static_google_cloud_scheduler_v1beta1_PubsubTarget_AttributesEntry_descriptor,
com.google.protobuf.WireFormat.FieldType.STRING,
"",
com.google.protobuf.WireFormat.FieldType.STRING,
"");
}
@SuppressWarnings("serial")
private com.google.protobuf.MapField<java.lang.String, java.lang.String> attributes_;
private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetAttributes() {
if (attributes_ == null) {
return com.google.protobuf.MapField.emptyMapField(AttributesDefaultEntryHolder.defaultEntry);
}
return attributes_;
}
public int getAttributesCount() {
return internalGetAttributes().getMap().size();
}
/**
*
*
* <pre>
* Attributes for PubsubMessage.
*
* Pubsub message must contain either non-empty data, or at least one
* attribute.
* </pre>
*
* <code>map<string, string> attributes = 4;</code>
*/
@java.lang.Override
public boolean containsAttributes(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
return internalGetAttributes().getMap().containsKey(key);
}
/** Use {@link #getAttributesMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getAttributes() {
return getAttributesMap();
}
/**
*
*
* <pre>
* Attributes for PubsubMessage.
*
* Pubsub message must contain either non-empty data, or at least one
* attribute.
* </pre>
*
* <code>map<string, string> attributes = 4;</code>
*/
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getAttributesMap() {
return internalGetAttributes().getMap();
}
/**
*
*
* <pre>
* Attributes for PubsubMessage.
*
* Pubsub message must contain either non-empty data, or at least one
* attribute.
* </pre>
*
* <code>map<string, string> attributes = 4;</code>
*/
@java.lang.Override
public /* nullable */ java.lang.String getAttributesOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetAttributes().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
* <pre>
* Attributes for PubsubMessage.
*
* Pubsub message must contain either non-empty data, or at least one
* attribute.
* </pre>
*
* <code>map<string, string> attributes = 4;</code>
*/
@java.lang.Override
public java.lang.String getAttributesOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetAttributes().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topicName_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, topicName_);
}
if (!data_.isEmpty()) {
output.writeBytes(3, data_);
}
com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
output, internalGetAttributes(), AttributesDefaultEntryHolder.defaultEntry, 4);
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topicName_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, topicName_);
}
if (!data_.isEmpty()) {
size += com.google.protobuf.CodedOutputStream.computeBytesSize(3, data_);
}
for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
internalGetAttributes().getMap().entrySet()) {
com.google.protobuf.MapEntry<java.lang.String, java.lang.String> attributes__ =
AttributesDefaultEntryHolder.defaultEntry
.newBuilderForType()
.setKey(entry.getKey())
.setValue(entry.getValue())
.build();
size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, attributes__);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over all fields (topic_name, data, attributes) plus unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.scheduler.v1beta1.PubsubTarget)) {
    return super.equals(obj);
  }
  com.google.cloud.scheduler.v1beta1.PubsubTarget that =
      (com.google.cloud.scheduler.v1beta1.PubsubTarget) obj;
  // Short-circuit comparison in the same field order as the generated original.
  return getTopicName().equals(that.getTopicName())
      && getData().equals(that.getData())
      && internalGetAttributes().equals(that.internalGetAttributes())
      && getUnknownFields().equals(that.getUnknownFields());
}
// Memoized hash consistent with equals(). Uses the standard protobuf-generated
// scheme: seed, then for each participating field fold in (37*h + fieldNumber)
// followed by (53*h + valueHash).
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + TOPIC_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getTopicName().hashCode();
  hash = (37 * hash) + DATA_FIELD_NUMBER;
  hash = (53 * hash) + getData().hashCode();
  // The map field contributes only when non-empty.
  if (!internalGetAttributes().getMap().isEmpty()) {
    hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER;
    hash = (53 * hash) + internalGetAttributes().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// ---------------------------------------------------------------------------
// Standard parseFrom()/parseDelimitedFrom() overloads. The in-memory variants
// delegate directly to PARSER; the stream variants route through
// GeneratedMessageV3 helpers so that java.io.IOException propagates unwrapped.
// ---------------------------------------------------------------------------
public static com.google.cloud.scheduler.v1beta1.PubsubTarget parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.scheduler.v1beta1.PubsubTarget parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.scheduler.v1beta1.PubsubTarget parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.scheduler.v1beta1.PubsubTarget parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.scheduler.v1beta1.PubsubTarget parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.scheduler.v1beta1.PubsubTarget parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.scheduler.v1beta1.PubsubTarget parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.scheduler.v1beta1.PubsubTarget parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.scheduler.v1beta1.PubsubTarget parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.scheduler.v1beta1.PubsubTarget parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.scheduler.v1beta1.PubsubTarget parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.scheduler.v1beta1.PubsubTarget parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// Creates a fresh builder with all fields at their defaults.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Creates a builder pre-populated with the contents of {@code prototype}.
public static Builder newBuilder(com.google.cloud.scheduler.v1beta1.PubsubTarget prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Skip the mergeFrom() copy when converting the default instance itself.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Pub/Sub target. The job will be delivered by publishing a message to
 * the given Pub/Sub topic.
 * </pre>
 *
 * Protobuf type {@code google.cloud.scheduler.v1beta1.PubsubTarget}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.scheduler.v1beta1.PubsubTarget)
    com.google.cloud.scheduler.v1beta1.PubsubTargetOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.scheduler.v1beta1.TargetProto
        .internal_static_google_cloud_scheduler_v1beta1_PubsubTarget_descriptor;
  }

  // Reflection hook: attributes (field number 4) is the only map field.
  @SuppressWarnings({"rawtypes"})
  protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
      int number) {
    switch (number) {
      case 4:
        return internalGetAttributes();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }

  @SuppressWarnings({"rawtypes"})
  protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
      int number) {
    switch (number) {
      case 4:
        return internalGetMutableAttributes();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.scheduler.v1beta1.TargetProto
        .internal_static_google_cloud_scheduler_v1beta1_PubsubTarget_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.scheduler.v1beta1.PubsubTarget.class,
            com.google.cloud.scheduler.v1beta1.PubsubTarget.Builder.class);
  }

  // Construct using com.google.cloud.scheduler.v1beta1.PubsubTarget.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  // Resets every field to its default and clears all presence bits.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    topicName_ = "";
    data_ = com.google.protobuf.ByteString.EMPTY;
    internalGetMutableAttributes().clear();
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.scheduler.v1beta1.TargetProto
        .internal_static_google_cloud_scheduler_v1beta1_PubsubTarget_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.scheduler.v1beta1.PubsubTarget getDefaultInstanceForType() {
    return com.google.cloud.scheduler.v1beta1.PubsubTarget.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.scheduler.v1beta1.PubsubTarget build() {
    com.google.cloud.scheduler.v1beta1.PubsubTarget result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.scheduler.v1beta1.PubsubTarget buildPartial() {
    com.google.cloud.scheduler.v1beta1.PubsubTarget result =
        new com.google.cloud.scheduler.v1beta1.PubsubTarget(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies into {@code result} only the fields whose presence bits are set.
  private void buildPartial0(com.google.cloud.scheduler.v1beta1.PubsubTarget result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.topicName_ = topicName_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.data_ = data_;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.attributes_ = internalGetAttributes();
      result.attributes_.makeImmutable();
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.scheduler.v1beta1.PubsubTarget) {
      return mergeFrom((com.google.cloud.scheduler.v1beta1.PubsubTarget) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Field-by-field merge: non-default scalars from {@code other} overwrite this
  // builder's values; map entries from {@code other} are merged in.
  public Builder mergeFrom(com.google.cloud.scheduler.v1beta1.PubsubTarget other) {
    if (other == com.google.cloud.scheduler.v1beta1.PubsubTarget.getDefaultInstance())
      return this;
    if (!other.getTopicName().isEmpty()) {
      topicName_ = other.topicName_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (other.getData() != com.google.protobuf.ByteString.EMPTY) {
      setData(other.getData());
    }
    internalGetMutableAttributes().mergeFrom(other.internalGetAttributes());
    bitField0_ |= 0x00000004;
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Parses fields from the wire directly into this builder.
  // Wire tags: 10 = topic_name (field 1, string), 26 = data (field 3, bytes),
  // 34 = one attributes map entry (field 4). Unknown fields are preserved.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              topicName_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 26:
            {
              data_ = input.readBytes();
              bitField0_ |= 0x00000002;
              break;
            } // case 26
          case 34:
            {
              com.google.protobuf.MapEntry<java.lang.String, java.lang.String> attributes__ =
                  input.readMessage(
                      AttributesDefaultEntryHolder.defaultEntry.getParserForType(),
                      extensionRegistry);
              internalGetMutableAttributes()
                  .getMutableMap()
                  .put(attributes__.getKey(), attributes__.getValue());
              bitField0_ |= 0x00000004;
              break;
            } // case 34
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // Presence bits: 0x1 = topic_name, 0x2 = data, 0x4 = attributes.
  private int bitField0_;

  private java.lang.Object topicName_ = "";
  /**
   *
   *
   * <pre>
   * Required. The name of the Cloud Pub/Sub topic to which messages will
   * be published when a job is delivered. The topic name must be in the
   * same format as required by Pub/Sub's
   * [PublishRequest.name](https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#publishrequest),
   * for example `projects/PROJECT_ID/topics/TOPIC_ID`.
   *
   * The topic must be in the same project as the Cloud Scheduler job.
   * </pre>
   *
   * <code>string topic_name = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @return The topicName.
   */
  public java.lang.String getTopicName() {
    java.lang.Object ref = topicName_;
    if (!(ref instanceof java.lang.String)) {
      // Lazily decode a ByteString into a String and cache the decoded form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      topicName_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the Cloud Pub/Sub topic to which messages will
   * be published when a job is delivered. The topic name must be in the
   * same format as required by Pub/Sub's
   * [PublishRequest.name](https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#publishrequest),
   * for example `projects/PROJECT_ID/topics/TOPIC_ID`.
   *
   * The topic must be in the same project as the Cloud Scheduler job.
   * </pre>
   *
   * <code>string topic_name = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @return The bytes for topicName.
   */
  public com.google.protobuf.ByteString getTopicNameBytes() {
    java.lang.Object ref = topicName_;
    if (ref instanceof String) {
      // Cache the UTF-8 encoding so repeated calls avoid re-encoding.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      topicName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the Cloud Pub/Sub topic to which messages will
   * be published when a job is delivered. The topic name must be in the
   * same format as required by Pub/Sub's
   * [PublishRequest.name](https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#publishrequest),
   * for example `projects/PROJECT_ID/topics/TOPIC_ID`.
   *
   * The topic must be in the same project as the Cloud Scheduler job.
   * </pre>
   *
   * <code>string topic_name = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @param value The topicName to set.
   * @return This builder for chaining.
   */
  public Builder setTopicName(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    topicName_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the Cloud Pub/Sub topic to which messages will
   * be published when a job is delivered. The topic name must be in the
   * same format as required by Pub/Sub's
   * [PublishRequest.name](https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#publishrequest),
   * for example `projects/PROJECT_ID/topics/TOPIC_ID`.
   *
   * The topic must be in the same project as the Cloud Scheduler job.
   * </pre>
   *
   * <code>string topic_name = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearTopicName() {
    topicName_ = getDefaultInstance().getTopicName();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the Cloud Pub/Sub topic to which messages will
   * be published when a job is delivered. The topic name must be in the
   * same format as required by Pub/Sub's
   * [PublishRequest.name](https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#publishrequest),
   * for example `projects/PROJECT_ID/topics/TOPIC_ID`.
   *
   * The topic must be in the same project as the Cloud Scheduler job.
   * </pre>
   *
   * <code>string topic_name = 1 [(.google.api.resource_reference) = { ... }</code>
   *
   * @param value The bytes for topicName to set.
   * @return This builder for chaining.
   */
  public Builder setTopicNameBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    topicName_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
  /**
   *
   *
   * <pre>
   * The message payload for PubsubMessage.
   *
   * Pubsub message must contain either non-empty data, or at least one
   * attribute.
   * </pre>
   *
   * <code>bytes data = 3;</code>
   *
   * @return The data.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getData() {
    return data_;
  }
  /**
   *
   *
   * <pre>
   * The message payload for PubsubMessage.
   *
   * Pubsub message must contain either non-empty data, or at least one
   * attribute.
   * </pre>
   *
   * <code>bytes data = 3;</code>
   *
   * @param value The data to set.
   * @return This builder for chaining.
   */
  public Builder setData(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    data_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * The message payload for PubsubMessage.
   *
   * Pubsub message must contain either non-empty data, or at least one
   * attribute.
   * </pre>
   *
   * <code>bytes data = 3;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearData() {
    bitField0_ = (bitField0_ & ~0x00000002);
    data_ = getDefaultInstance().getData();
    onChanged();
    return this;
  }

  private com.google.protobuf.MapField<java.lang.String, java.lang.String> attributes_;

  // Read-only view; returns a shared empty MapField while unset.
  private com.google.protobuf.MapField<java.lang.String, java.lang.String>
      internalGetAttributes() {
    if (attributes_ == null) {
      return com.google.protobuf.MapField.emptyMapField(
          AttributesDefaultEntryHolder.defaultEntry);
    }
    return attributes_;
  }

  // Copy-on-write accessor: allocates or copies the MapField so it is mutable,
  // and marks field 4 as set.
  private com.google.protobuf.MapField<java.lang.String, java.lang.String>
      internalGetMutableAttributes() {
    if (attributes_ == null) {
      attributes_ =
          com.google.protobuf.MapField.newMapField(AttributesDefaultEntryHolder.defaultEntry);
    }
    if (!attributes_.isMutable()) {
      attributes_ = attributes_.copy();
    }
    bitField0_ |= 0x00000004;
    onChanged();
    return attributes_;
  }

  public int getAttributesCount() {
    return internalGetAttributes().getMap().size();
  }
  /**
   *
   *
   * <pre>
   * Attributes for PubsubMessage.
   *
   * Pubsub message must contain either non-empty data, or at least one
   * attribute.
   * </pre>
   *
   * <code>map&lt;string, string&gt; attributes = 4;</code>
   */
  @java.lang.Override
  public boolean containsAttributes(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    return internalGetAttributes().getMap().containsKey(key);
  }
  /** Use {@link #getAttributesMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getAttributes() {
    return getAttributesMap();
  }
  /**
   *
   *
   * <pre>
   * Attributes for PubsubMessage.
   *
   * Pubsub message must contain either non-empty data, or at least one
   * attribute.
   * </pre>
   *
   * <code>map&lt;string, string&gt; attributes = 4;</code>
   */
  @java.lang.Override
  public java.util.Map<java.lang.String, java.lang.String> getAttributesMap() {
    return internalGetAttributes().getMap();
  }
  /**
   *
   *
   * <pre>
   * Attributes for PubsubMessage.
   *
   * Pubsub message must contain either non-empty data, or at least one
   * attribute.
   * </pre>
   *
   * <code>map&lt;string, string&gt; attributes = 4;</code>
   */
  @java.lang.Override
  public /* nullable */ java.lang.String getAttributesOrDefault(
      java.lang.String key,
      /* nullable */
      java.lang.String defaultValue) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetAttributes().getMap();
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }
  /**
   *
   *
   * <pre>
   * Attributes for PubsubMessage.
   *
   * Pubsub message must contain either non-empty data, or at least one
   * attribute.
   * </pre>
   *
   * <code>map&lt;string, string&gt; attributes = 4;</code>
   */
  @java.lang.Override
  public java.lang.String getAttributesOrThrow(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetAttributes().getMap();
    if (!map.containsKey(key)) {
      throw new java.lang.IllegalArgumentException();
    }
    return map.get(key);
  }

  // Removes all attributes entries and clears the field's presence bit.
  public Builder clearAttributes() {
    bitField0_ = (bitField0_ & ~0x00000004);
    internalGetMutableAttributes().getMutableMap().clear();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Attributes for PubsubMessage.
   *
   * Pubsub message must contain either non-empty data, or at least one
   * attribute.
   * </pre>
   *
   * <code>map&lt;string, string&gt; attributes = 4;</code>
   */
  public Builder removeAttributes(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    internalGetMutableAttributes().getMutableMap().remove(key);
    return this;
  }
  /** Use alternate mutation accessors instead. */
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getMutableAttributes() {
    bitField0_ |= 0x00000004;
    return internalGetMutableAttributes().getMutableMap();
  }
  /**
   *
   *
   * <pre>
   * Attributes for PubsubMessage.
   *
   * Pubsub message must contain either non-empty data, or at least one
   * attribute.
   * </pre>
   *
   * <code>map&lt;string, string&gt; attributes = 4;</code>
   */
  public Builder putAttributes(java.lang.String key, java.lang.String value) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    if (value == null) {
      throw new NullPointerException("map value");
    }
    internalGetMutableAttributes().getMutableMap().put(key, value);
    bitField0_ |= 0x00000004;
    return this;
  }
  /**
   *
   *
   * <pre>
   * Attributes for PubsubMessage.
   *
   * Pubsub message must contain either non-empty data, or at least one
   * attribute.
   * </pre>
   *
   * <code>map&lt;string, string&gt; attributes = 4;</code>
   */
  public Builder putAllAttributes(java.util.Map<java.lang.String, java.lang.String> values) {
    internalGetMutableAttributes().getMutableMap().putAll(values);
    bitField0_ |= 0x00000004;
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.scheduler.v1beta1.PubsubTarget)
}
// @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.PubsubTarget)
// Singleton default (all-fields-default) instance of this message.
private static final com.google.cloud.scheduler.v1beta1.PubsubTarget DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.scheduler.v1beta1.PubsubTarget();
}

public static com.google.cloud.scheduler.v1beta1.PubsubTarget getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser: parses into a fresh Builder; on failure, attaches the
// partially-built message to the thrown InvalidProtocolBufferException so
// callers can inspect what was successfully read.
private static final com.google.protobuf.Parser<PubsubTarget> PARSER =
    new com.google.protobuf.AbstractParser<PubsubTarget>() {
      @java.lang.Override
      public PubsubTarget parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<PubsubTarget> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<PubsubTarget> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.scheduler.v1beta1.PubsubTarget getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ==== End of generated PubsubTarget file. Next file: googleapis/google-cloud-java —
// java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/HttpJsonTargetTcpProxiesStub.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import static com.google.cloud.compute.v1.TargetTcpProxiesClient.AggregatedListPagedResponse;
import static com.google.cloud.compute.v1.TargetTcpProxiesClient.ListPagedResponse;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.AggregatedListTargetTcpProxiesRequest;
import com.google.cloud.compute.v1.DeleteTargetTcpProxyRequest;
import com.google.cloud.compute.v1.GetTargetTcpProxyRequest;
import com.google.cloud.compute.v1.InsertTargetTcpProxyRequest;
import com.google.cloud.compute.v1.ListTargetTcpProxiesRequest;
import com.google.cloud.compute.v1.Operation;
import com.google.cloud.compute.v1.Operation.Status;
import com.google.cloud.compute.v1.SetBackendServiceTargetTcpProxyRequest;
import com.google.cloud.compute.v1.SetProxyHeaderTargetTcpProxyRequest;
import com.google.cloud.compute.v1.TargetTcpProxy;
import com.google.cloud.compute.v1.TargetTcpProxyAggregatedList;
import com.google.cloud.compute.v1.TargetTcpProxyList;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the TargetTcpProxies service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class HttpJsonTargetTcpProxiesStub extends TargetTcpProxiesStub {
// Registry of message types known to the JSON response parsers below
// (installed via setDefaultTypeRegistry on each ProtoMessageResponseParser).
private static final TypeRegistry typeRegistry =
    TypeRegistry.newBuilder().add(Operation.getDescriptor()).build();
// Descriptor for AggregatedList: GET /compute/v1/projects/{project}/aggregated/targetTcpProxies.
// {project} is bound from the request as a path parameter; all list options
// (filter, paging, partial-success flags) are sent as query parameters only
// when present on the request. There is no request body.
private static final ApiMethodDescriptor<
        AggregatedListTargetTcpProxiesRequest, TargetTcpProxyAggregatedList>
    aggregatedListMethodDescriptor =
        ApiMethodDescriptor
            .<AggregatedListTargetTcpProxiesRequest, TargetTcpProxyAggregatedList>newBuilder()
            .setFullMethodName("google.cloud.compute.v1.TargetTcpProxies/AggregatedList")
            .setHttpMethod("GET")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<AggregatedListTargetTcpProxiesRequest>newBuilder()
                    .setPath(
                        "/compute/v1/projects/{project}/aggregated/targetTcpProxies",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<AggregatedListTargetTcpProxiesRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "project", request.getProject());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<AggregatedListTargetTcpProxiesRequest> serializer =
                              ProtoRestSerializer.create();
                          if (request.hasFilter()) {
                            serializer.putQueryParam(fields, "filter", request.getFilter());
                          }
                          if (request.hasIncludeAllScopes()) {
                            serializer.putQueryParam(
                                fields, "includeAllScopes", request.getIncludeAllScopes());
                          }
                          if (request.hasMaxResults()) {
                            serializer.putQueryParam(
                                fields, "maxResults", request.getMaxResults());
                          }
                          if (request.hasOrderBy()) {
                            serializer.putQueryParam(fields, "orderBy", request.getOrderBy());
                          }
                          if (request.hasPageToken()) {
                            serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                          }
                          if (request.hasReturnPartialSuccess()) {
                            serializer.putQueryParam(
                                fields,
                                "returnPartialSuccess",
                                request.getReturnPartialSuccess());
                          }
                          if (request.hasServiceProjectNumber()) {
                            serializer.putQueryParam(
                                fields,
                                "serviceProjectNumber",
                                request.getServiceProjectNumber());
                          }
                          return fields;
                        })
                    .setRequestBodyExtractor(request -> null)
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<TargetTcpProxyAggregatedList>newBuilder()
                    .setDefaultInstance(TargetTcpProxyAggregatedList.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
// Descriptor for Delete: DELETE /compute/v1/projects/{project}/global/targetTcpProxies/{targetTcpProxy}.
// Returns a long-running Operation; the snapshot factory derives the operation
// name as "<operation name>:<project>" and treats Status.DONE as completion.
private static final ApiMethodDescriptor<DeleteTargetTcpProxyRequest, Operation>
    deleteMethodDescriptor =
        ApiMethodDescriptor.<DeleteTargetTcpProxyRequest, Operation>newBuilder()
            .setFullMethodName("google.cloud.compute.v1.TargetTcpProxies/Delete")
            .setHttpMethod("DELETE")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<DeleteTargetTcpProxyRequest>newBuilder()
                    .setPath(
                        "/compute/v1/projects/{project}/global/targetTcpProxies/{targetTcpProxy}",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<DeleteTargetTcpProxyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "project", request.getProject());
                          serializer.putPathParam(
                              fields, "targetTcpProxy", request.getTargetTcpProxy());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        request -> {
                          Map<String, List<String>> fields = new HashMap<>();
                          ProtoRestSerializer<DeleteTargetTcpProxyRequest> serializer =
                              ProtoRestSerializer.create();
                          if (request.hasRequestId()) {
                            serializer.putQueryParam(fields, "requestId", request.getRequestId());
                          }
                          return fields;
                        })
                    .setRequestBodyExtractor(request -> null)
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<Operation>newBuilder()
                    .setDefaultInstance(Operation.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .setOperationSnapshotFactory(
                (DeleteTargetTcpProxyRequest request, Operation response) -> {
                  StringBuilder opName = new StringBuilder(response.getName());
                  opName.append(":").append(request.getProject());
                  return HttpJsonOperationSnapshot.newBuilder()
                      .setName(opName.toString())
                      .setMetadata(response)
                      .setDone(Status.DONE.equals(response.getStatus()))
                      .setResponse(response)
                      .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage())
                      .build();
                })
            .build();
// Descriptor for Get: GET /compute/v1/projects/{project}/global/targetTcpProxies/{targetTcpProxy}.
// Both identifiers are bound as path parameters; the method takes no query
// parameters and no request body, and returns the TargetTcpProxy resource.
private static final ApiMethodDescriptor<GetTargetTcpProxyRequest, TargetTcpProxy>
    getMethodDescriptor =
        ApiMethodDescriptor.<GetTargetTcpProxyRequest, TargetTcpProxy>newBuilder()
            .setFullMethodName("google.cloud.compute.v1.TargetTcpProxies/Get")
            .setHttpMethod("GET")
            .setType(ApiMethodDescriptor.MethodType.UNARY)
            .setRequestFormatter(
                ProtoMessageRequestFormatter.<GetTargetTcpProxyRequest>newBuilder()
                    .setPath(
                        "/compute/v1/projects/{project}/global/targetTcpProxies/{targetTcpProxy}",
                        request -> {
                          Map<String, String> fields = new HashMap<>();
                          ProtoRestSerializer<GetTargetTcpProxyRequest> serializer =
                              ProtoRestSerializer.create();
                          serializer.putPathParam(fields, "project", request.getProject());
                          serializer.putPathParam(
                              fields, "targetTcpProxy", request.getTargetTcpProxy());
                          return fields;
                        })
                    .setQueryParamsExtractor(
                        // No query parameters for Get; removed the unused
                        // ProtoRestSerializer local that the generator emitted.
                        request -> new HashMap<String, List<String>>())
                    .setRequestBodyExtractor(request -> null)
                    .build())
            .setResponseParser(
                ProtoMessageResponseParser.<TargetTcpProxy>newBuilder()
                    .setDefaultInstance(TargetTcpProxy.getDefaultInstance())
                    .setDefaultTypeRegistry(typeRegistry)
                    .build())
            .build();
  // REST descriptor for TargetTcpProxies.Insert: POSTs the TargetTcpProxy resource as the
  // request body and returns a long-running Operation.
  private static final ApiMethodDescriptor<InsertTargetTcpProxyRequest, Operation>
      insertMethodDescriptor =
          ApiMethodDescriptor.<InsertTargetTcpProxyRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.compute.v1.TargetTcpProxies/Insert")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<InsertTargetTcpProxyRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/global/targetTcpProxies",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<InsertTargetTcpProxyRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "project", request.getProject());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<InsertTargetTcpProxyRequest> serializer =
                                ProtoRestSerializer.create();
                            // requestId is optional; only sent when explicitly set on the request.
                            if (request.hasRequestId()) {
                              serializer.putQueryParam(fields, "requestId", request.getRequestId());
                            }
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody(
                                      "targetTcpProxyResource",
                                      request.getTargetTcpProxyResource(),
                                      false))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (InsertTargetTcpProxyRequest request, Operation response) -> {
                    // Snapshot name is "<operationName>:<project>" so the project scope can be
                    // recovered when the operation is later polled.
                    StringBuilder opName = new StringBuilder(response.getName());
                    opName.append(":").append(request.getProject());
                    return HttpJsonOperationSnapshot.newBuilder()
                        .setName(opName.toString())
                        .setMetadata(response)
                        .setDone(Status.DONE.equals(response.getStatus()))
                        .setResponse(response)
                        .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage())
                        .build();
                  })
              .build();
  // REST descriptor for TargetTcpProxies.List: GET with optional paging/filtering query
  // parameters, returning one TargetTcpProxyList page.
  private static final ApiMethodDescriptor<ListTargetTcpProxiesRequest, TargetTcpProxyList>
      listMethodDescriptor =
          ApiMethodDescriptor.<ListTargetTcpProxiesRequest, TargetTcpProxyList>newBuilder()
              .setFullMethodName("google.cloud.compute.v1.TargetTcpProxies/List")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListTargetTcpProxiesRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/global/targetTcpProxies",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListTargetTcpProxiesRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "project", request.getProject());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListTargetTcpProxiesRequest> serializer =
                                ProtoRestSerializer.create();
                            // Every list parameter is optional; only set fields are serialized.
                            if (request.hasFilter()) {
                              serializer.putQueryParam(fields, "filter", request.getFilter());
                            }
                            if (request.hasMaxResults()) {
                              serializer.putQueryParam(
                                  fields, "maxResults", request.getMaxResults());
                            }
                            if (request.hasOrderBy()) {
                              serializer.putQueryParam(fields, "orderBy", request.getOrderBy());
                            }
                            if (request.hasPageToken()) {
                              serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            }
                            if (request.hasReturnPartialSuccess()) {
                              serializer.putQueryParam(
                                  fields,
                                  "returnPartialSuccess",
                                  request.getReturnPartialSuccess());
                            }
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<TargetTcpProxyList>newBuilder()
                      .setDefaultInstance(TargetTcpProxyList.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // REST descriptor for TargetTcpProxies.SetBackendService: POSTs the SetBackendService
  // request resource to the proxy's setBackendService sub-path; long-running Operation result.
  private static final ApiMethodDescriptor<SetBackendServiceTargetTcpProxyRequest, Operation>
      setBackendServiceMethodDescriptor =
          ApiMethodDescriptor.<SetBackendServiceTargetTcpProxyRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.compute.v1.TargetTcpProxies/SetBackendService")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<SetBackendServiceTargetTcpProxyRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/global/targetTcpProxies/{targetTcpProxy}/setBackendService",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<SetBackendServiceTargetTcpProxyRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "project", request.getProject());
                            serializer.putPathParam(
                                fields, "targetTcpProxy", request.getTargetTcpProxy());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<SetBackendServiceTargetTcpProxyRequest> serializer =
                                ProtoRestSerializer.create();
                            // requestId is optional; only sent when explicitly set.
                            if (request.hasRequestId()) {
                              serializer.putQueryParam(fields, "requestId", request.getRequestId());
                            }
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody(
                                      "targetTcpProxiesSetBackendServiceRequestResource",
                                      request.getTargetTcpProxiesSetBackendServiceRequestResource(),
                                      false))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (SetBackendServiceTargetTcpProxyRequest request, Operation response) -> {
                    // Snapshot name is "<operationName>:<project>" to keep the project scope.
                    StringBuilder opName = new StringBuilder(response.getName());
                    opName.append(":").append(request.getProject());
                    return HttpJsonOperationSnapshot.newBuilder()
                        .setName(opName.toString())
                        .setMetadata(response)
                        .setDone(Status.DONE.equals(response.getStatus()))
                        .setResponse(response)
                        .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage())
                        .build();
                  })
              .build();
  // REST descriptor for TargetTcpProxies.SetProxyHeader: POSTs the SetProxyHeader request
  // resource to the proxy's setProxyHeader sub-path; long-running Operation result.
  private static final ApiMethodDescriptor<SetProxyHeaderTargetTcpProxyRequest, Operation>
      setProxyHeaderMethodDescriptor =
          ApiMethodDescriptor.<SetProxyHeaderTargetTcpProxyRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.compute.v1.TargetTcpProxies/SetProxyHeader")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<SetProxyHeaderTargetTcpProxyRequest>newBuilder()
                      .setPath(
                          "/compute/v1/projects/{project}/global/targetTcpProxies/{targetTcpProxy}/setProxyHeader",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<SetProxyHeaderTargetTcpProxyRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "project", request.getProject());
                            serializer.putPathParam(
                                fields, "targetTcpProxy", request.getTargetTcpProxy());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<SetProxyHeaderTargetTcpProxyRequest> serializer =
                                ProtoRestSerializer.create();
                            // requestId is optional; only sent when explicitly set.
                            if (request.hasRequestId()) {
                              serializer.putQueryParam(fields, "requestId", request.getRequestId());
                            }
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody(
                                      "targetTcpProxiesSetProxyHeaderRequestResource",
                                      request.getTargetTcpProxiesSetProxyHeaderRequestResource(),
                                      false))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (SetProxyHeaderTargetTcpProxyRequest request, Operation response) -> {
                    // Snapshot name is "<operationName>:<project>" to keep the project scope.
                    StringBuilder opName = new StringBuilder(response.getName());
                    opName.append(":").append(request.getProject());
                    return HttpJsonOperationSnapshot.newBuilder()
                        .setName(opName.toString())
                        .setMetadata(response)
                        .setDone(Status.DONE.equals(response.getStatus()))
                        .setResponse(response)
                        .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage())
                        .build();
                  })
              .build();
  // Per-RPC callables; all are wired up once in the constructor from the descriptors above.
  private final UnaryCallable<AggregatedListTargetTcpProxiesRequest, TargetTcpProxyAggregatedList>
      aggregatedListCallable;
  private final UnaryCallable<AggregatedListTargetTcpProxiesRequest, AggregatedListPagedResponse>
      aggregatedListPagedCallable;
  private final UnaryCallable<DeleteTargetTcpProxyRequest, Operation> deleteCallable;
  private final OperationCallable<DeleteTargetTcpProxyRequest, Operation, Operation>
      deleteOperationCallable;
  private final UnaryCallable<GetTargetTcpProxyRequest, TargetTcpProxy> getCallable;
  private final UnaryCallable<InsertTargetTcpProxyRequest, Operation> insertCallable;
  private final OperationCallable<InsertTargetTcpProxyRequest, Operation, Operation>
      insertOperationCallable;
  private final UnaryCallable<ListTargetTcpProxiesRequest, TargetTcpProxyList> listCallable;
  private final UnaryCallable<ListTargetTcpProxiesRequest, ListPagedResponse> listPagedCallable;
  private final UnaryCallable<SetBackendServiceTargetTcpProxyRequest, Operation>
      setBackendServiceCallable;
  private final OperationCallable<SetBackendServiceTargetTcpProxyRequest, Operation, Operation>
      setBackendServiceOperationCallable;
  private final UnaryCallable<SetProxyHeaderTargetTcpProxyRequest, Operation>
      setProxyHeaderCallable;
  private final OperationCallable<SetProxyHeaderTargetTcpProxyRequest, Operation, Operation>
      setProxyHeaderOperationCallable;
  // Aggregates every closeable resource so close()/shutdown() can manage them together.
  private final BackgroundResource backgroundResources;
  // Stub used to poll the global Operations returned by the mutating RPCs.
  private final HttpJsonGlobalOperationsStub httpJsonOperationsStub;
  private final HttpJsonStubCallableFactory callableFactory;
public static final HttpJsonTargetTcpProxiesStub create(TargetTcpProxiesStubSettings settings)
throws IOException {
return new HttpJsonTargetTcpProxiesStub(settings, ClientContext.create(settings));
}
public static final HttpJsonTargetTcpProxiesStub create(ClientContext clientContext)
throws IOException {
return new HttpJsonTargetTcpProxiesStub(
TargetTcpProxiesStubSettings.newBuilder().build(), clientContext);
}
public static final HttpJsonTargetTcpProxiesStub create(
ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
return new HttpJsonTargetTcpProxiesStub(
TargetTcpProxiesStubSettings.newBuilder().build(), clientContext, callableFactory);
}
  /**
   * Constructs an instance of HttpJsonTargetTcpProxiesStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonTargetTcpProxiesStub(
      TargetTcpProxiesStubSettings settings, ClientContext clientContext) throws IOException {
    // Delegates to the full constructor with the default HTTP/JSON callable factory.
    this(settings, clientContext, new HttpJsonTargetTcpProxiesCallableFactory());
  }
  /**
   * Constructs an instance of HttpJsonTargetTcpProxiesStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonTargetTcpProxiesStub(
      TargetTcpProxiesStubSettings settings,
      ClientContext clientContext,
      HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    // Operations stub used by the *OperationCallable wrappers to poll long-running operations.
    this.httpJsonOperationsStub =
        HttpJsonGlobalOperationsStub.create(clientContext, callableFactory);
    // Transport settings pair each method descriptor with a params extractor that supplies
    // routing-header values (project / target_tcp_proxy) for the request.
    HttpJsonCallSettings<AggregatedListTargetTcpProxiesRequest, TargetTcpProxyAggregatedList>
        aggregatedListTransportSettings =
            HttpJsonCallSettings
                .<AggregatedListTargetTcpProxiesRequest, TargetTcpProxyAggregatedList>newBuilder()
                .setMethodDescriptor(aggregatedListMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("project", String.valueOf(request.getProject()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<DeleteTargetTcpProxyRequest, Operation> deleteTransportSettings =
        HttpJsonCallSettings.<DeleteTargetTcpProxyRequest, Operation>newBuilder()
            .setMethodDescriptor(deleteMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  builder.add("target_tcp_proxy", String.valueOf(request.getTargetTcpProxy()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<GetTargetTcpProxyRequest, TargetTcpProxy> getTransportSettings =
        HttpJsonCallSettings.<GetTargetTcpProxyRequest, TargetTcpProxy>newBuilder()
            .setMethodDescriptor(getMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  builder.add("target_tcp_proxy", String.valueOf(request.getTargetTcpProxy()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<InsertTargetTcpProxyRequest, Operation> insertTransportSettings =
        HttpJsonCallSettings.<InsertTargetTcpProxyRequest, Operation>newBuilder()
            .setMethodDescriptor(insertMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<ListTargetTcpProxiesRequest, TargetTcpProxyList> listTransportSettings =
        HttpJsonCallSettings.<ListTargetTcpProxiesRequest, TargetTcpProxyList>newBuilder()
            .setMethodDescriptor(listMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<SetBackendServiceTargetTcpProxyRequest, Operation>
        setBackendServiceTransportSettings =
            HttpJsonCallSettings.<SetBackendServiceTargetTcpProxyRequest, Operation>newBuilder()
                .setMethodDescriptor(setBackendServiceMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("project", String.valueOf(request.getProject()));
                      builder.add("target_tcp_proxy", String.valueOf(request.getTargetTcpProxy()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<SetProxyHeaderTargetTcpProxyRequest, Operation>
        setProxyHeaderTransportSettings =
            HttpJsonCallSettings.<SetProxyHeaderTargetTcpProxyRequest, Operation>newBuilder()
                .setMethodDescriptor(setProxyHeaderMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("project", String.valueOf(request.getProject()));
                      builder.add("target_tcp_proxy", String.valueOf(request.getTargetTcpProxy()));
                      return builder.build();
                    })
                .build();
    // Build the public callables from the transport settings and the user-configured
    // per-method settings; operation callables additionally use the operations stub.
    this.aggregatedListCallable =
        callableFactory.createUnaryCallable(
            aggregatedListTransportSettings, settings.aggregatedListSettings(), clientContext);
    this.aggregatedListPagedCallable =
        callableFactory.createPagedCallable(
            aggregatedListTransportSettings, settings.aggregatedListSettings(), clientContext);
    this.deleteCallable =
        callableFactory.createUnaryCallable(
            deleteTransportSettings, settings.deleteSettings(), clientContext);
    this.deleteOperationCallable =
        callableFactory.createOperationCallable(
            deleteTransportSettings,
            settings.deleteOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.getCallable =
        callableFactory.createUnaryCallable(
            getTransportSettings, settings.getSettings(), clientContext);
    this.insertCallable =
        callableFactory.createUnaryCallable(
            insertTransportSettings, settings.insertSettings(), clientContext);
    this.insertOperationCallable =
        callableFactory.createOperationCallable(
            insertTransportSettings,
            settings.insertOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.listCallable =
        callableFactory.createUnaryCallable(
            listTransportSettings, settings.listSettings(), clientContext);
    this.listPagedCallable =
        callableFactory.createPagedCallable(
            listTransportSettings, settings.listSettings(), clientContext);
    this.setBackendServiceCallable =
        callableFactory.createUnaryCallable(
            setBackendServiceTransportSettings,
            settings.setBackendServiceSettings(),
            clientContext);
    this.setBackendServiceOperationCallable =
        callableFactory.createOperationCallable(
            setBackendServiceTransportSettings,
            settings.setBackendServiceOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.setProxyHeaderCallable =
        callableFactory.createUnaryCallable(
            setProxyHeaderTransportSettings, settings.setProxyHeaderSettings(), clientContext);
    this.setProxyHeaderOperationCallable =
        callableFactory.createOperationCallable(
            setProxyHeaderTransportSettings,
            settings.setProxyHeaderOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    // Register every background resource for joint lifecycle management via close()/shutdown().
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
methodDescriptors.add(aggregatedListMethodDescriptor);
methodDescriptors.add(deleteMethodDescriptor);
methodDescriptors.add(getMethodDescriptor);
methodDescriptors.add(insertMethodDescriptor);
methodDescriptors.add(listMethodDescriptor);
methodDescriptors.add(setBackendServiceMethodDescriptor);
methodDescriptors.add(setProxyHeaderMethodDescriptor);
return methodDescriptors;
}
  /** Returns the unary callable for the AggregatedList RPC. */
  @Override
  public UnaryCallable<AggregatedListTargetTcpProxiesRequest, TargetTcpProxyAggregatedList>
      aggregatedListCallable() {
    return aggregatedListCallable;
  }

  /** Returns the page-aware variant of the AggregatedList callable. */
  @Override
  public UnaryCallable<AggregatedListTargetTcpProxiesRequest, AggregatedListPagedResponse>
      aggregatedListPagedCallable() {
    return aggregatedListPagedCallable;
  }

  /** Returns the unary callable for the Delete RPC. */
  @Override
  public UnaryCallable<DeleteTargetTcpProxyRequest, Operation> deleteCallable() {
    return deleteCallable;
  }

  /** Returns the long-running-operation callable for the Delete RPC. */
  @Override
  public OperationCallable<DeleteTargetTcpProxyRequest, Operation, Operation>
      deleteOperationCallable() {
    return deleteOperationCallable;
  }

  /** Returns the unary callable for the Get RPC. */
  @Override
  public UnaryCallable<GetTargetTcpProxyRequest, TargetTcpProxy> getCallable() {
    return getCallable;
  }

  /** Returns the unary callable for the Insert RPC. */
  @Override
  public UnaryCallable<InsertTargetTcpProxyRequest, Operation> insertCallable() {
    return insertCallable;
  }

  /** Returns the long-running-operation callable for the Insert RPC. */
  @Override
  public OperationCallable<InsertTargetTcpProxyRequest, Operation, Operation>
      insertOperationCallable() {
    return insertOperationCallable;
  }

  /** Returns the unary callable for the List RPC. */
  @Override
  public UnaryCallable<ListTargetTcpProxiesRequest, TargetTcpProxyList> listCallable() {
    return listCallable;
  }

  /** Returns the page-aware variant of the List callable. */
  @Override
  public UnaryCallable<ListTargetTcpProxiesRequest, ListPagedResponse> listPagedCallable() {
    return listPagedCallable;
  }

  /** Returns the unary callable for the SetBackendService RPC. */
  @Override
  public UnaryCallable<SetBackendServiceTargetTcpProxyRequest, Operation>
      setBackendServiceCallable() {
    return setBackendServiceCallable;
  }

  /** Returns the long-running-operation callable for the SetBackendService RPC. */
  @Override
  public OperationCallable<SetBackendServiceTargetTcpProxyRequest, Operation, Operation>
      setBackendServiceOperationCallable() {
    return setBackendServiceOperationCallable;
  }

  /** Returns the unary callable for the SetProxyHeader RPC. */
  @Override
  public UnaryCallable<SetProxyHeaderTargetTcpProxyRequest, Operation> setProxyHeaderCallable() {
    return setProxyHeaderCallable;
  }

  /** Returns the long-running-operation callable for the SetProxyHeader RPC. */
  @Override
  public OperationCallable<SetProxyHeaderTargetTcpProxyRequest, Operation, Operation>
      setProxyHeaderOperationCallable() {
    return setProxyHeaderOperationCallable;
  }
  /**
   * Closes the stub, releasing the operations stub and all other background resources.
   * Runtime exceptions propagate unchanged; checked exceptions are wrapped.
   */
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      // Surface checked close failures as an unchecked IllegalStateException with cause.
      throw new IllegalStateException("Failed to close resource", e);
    }
  }
  /** Initiates an orderly shutdown of all background resources. */
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  /** Returns whether shutdown has been initiated. */
  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  /** Returns whether all background resources have terminated. */
  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  /** Attempts an immediate shutdown of all background resources. */
  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  /** Blocks until termination or the given timeout elapses. */
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
|
apache/flink | 38,273 | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/MapFunctionITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.functions;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.functions.BuiltInFunctionDefinitions;
import org.apache.flink.types.Row;
import org.apache.flink.util.CollectionUtil;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Period;
import java.util.Collections;
import java.util.Map;
import java.util.stream.Stream;
import static org.apache.flink.table.api.DataTypes.BIGINT;
import static org.apache.flink.table.api.DataTypes.BOOLEAN;
import static org.apache.flink.table.api.DataTypes.DATE;
import static org.apache.flink.table.api.DataTypes.DECIMAL;
import static org.apache.flink.table.api.DataTypes.DOUBLE;
import static org.apache.flink.table.api.DataTypes.FLOAT;
import static org.apache.flink.table.api.DataTypes.INT;
import static org.apache.flink.table.api.DataTypes.INTERVAL;
import static org.apache.flink.table.api.DataTypes.MAP;
import static org.apache.flink.table.api.DataTypes.MONTH;
import static org.apache.flink.table.api.DataTypes.STRING;
import static org.apache.flink.table.api.DataTypes.TIME;
import static org.apache.flink.table.api.DataTypes.TIMESTAMP;
import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.call;
import static org.apache.flink.table.api.Expressions.lit;
import static org.apache.flink.table.api.Expressions.map;
import static org.apache.flink.table.api.Expressions.mapFromArrays;
import static org.apache.flink.util.CollectionUtil.entry;
/** Test {@link BuiltInFunctionDefinitions#MAP} and its return type. */
public class MapFunctionITCase extends BuiltInFunctionTestBase {
    // Fixed temporal sample values reused across the MAP test specs below.
    private static final LocalDate TEST_DATE_1 = LocalDate.of(1985, 11, 4);
    private static final LocalDate TEST_DATE_2 = LocalDate.of(2018, 7, 26);
    private static final LocalTime TEST_TIME_1 = LocalTime.of(17, 18, 19);
    private static final LocalTime TEST_TIME_2 = LocalTime.of(14, 15, 16);
    private static final LocalDateTime TEST_DATE_TIME_1 = LocalDateTime.of(1985, 11, 4, 17, 18, 19);
    private static final LocalDateTime TEST_DATE_TIME_2 = LocalDateTime.of(2018, 7, 26, 14, 15, 16);
    // String keys used by the month-interval MAP test case.
    private static final String A = "a";
    private static final String B = "b";
    // Month-interval values: one concrete negative interval and one SQL NULL.
    private static final int INTERVAL_1 = -123;
    private static final Integer INTERVAL_NULL = null;
@Override
Stream<TestSetSpec> getTestSetSpecs() {
return Stream.of(
mapTestCases(),
mapKeysTestCases(),
mapValuesTestCases(),
mapEntriesTestCases(),
mapFromArraysTestCases(),
mapUnionTestCases())
.flatMap(s -> s);
}
    /**
     * Specs for the MAP constructor: duplicate-key resolution (the last value for a key wins),
     * numeric/decimal/boolean entries, nested maps, temporal keys/values, and nullable
     * month-interval values.
     */
    private Stream<TestSetSpec> mapTestCases() {
        return Stream.of(
                TestSetSpec.forFunction(BuiltInFunctionDefinitions.MAP)
                        .onFieldsWithData(
                                1,
                                2,
                                3,
                                4,
                                new BigDecimal("1.2345"),
                                new BigDecimal("1.2346"),
                                true)
                        .andDataTypes(
                                INT().notNull(),
                                INT().notNull(),
                                INT().notNull(),
                                INT().notNull(),
                                DECIMAL(10, 4).notNull(),
                                DECIMAL(10, 4).notNull(),
                                BOOLEAN().notNull())
                        .testResult(
                                // duplicate keys: the expected result keeps the last value
                                // written per key (1 -> 8, 2 -> 20)
                                resultSpec(
                                        map(
                                                1, 1, 1, 2, 1, 9, 1, 3, 2, 24, 2, 29, 1, 0, 2, 22,
                                                1, 8, 2, 25, 2, 20),
                                        "MAP[1, 1, 1, 2, 1, 9, 1, 3, 2, 24, 2, 29, 1, 0, 2, 22, 1, 8, 2, 25, 2, 20]",
                                        Map.ofEntries(Map.entry(1, 8), Map.entry(2, 20)),
                                        DataTypes.MAP(INT().notNull(), INT().notNull()).notNull()),
                                resultSpec(
                                        map($("f0"), $("f0"), $("f0"), $("f1")),
                                        "MAP[f0, f0, f0, f1]",
                                        Collections.singletonMap(1, 2),
                                        DataTypes.MAP(INT().notNull(), INT().notNull()).notNull()),
                                resultSpec(
                                        map($("f4"), $("f5")),
                                        "MAP[f4, f5]",
                                        Collections.singletonMap(
                                                new BigDecimal("1.2345"), new BigDecimal("1.2346")),
                                        DataTypes.MAP(
                                                        DECIMAL(10, 4).notNull(),
                                                        DECIMAL(10, 4).notNull())
                                                .notNull()),
                                // keys and values may be arbitrary expressions
                                resultSpec(
                                        map(
                                                $("f0").plus($("f1")),
                                                $("f2").times($("f2")),
                                                $("f2").minus($("f1")),
                                                $("f3").minus($("f0"))),
                                        "MAP[f0 + f1, f2 * f2, f2 - f1, f3 - f0]",
                                        CollectionUtil.map(
                                                entry(1 + 2, 3 * 3), entry(3 - 2, 4 - 1)),
                                        DataTypes.MAP(INT().notNull(), INT().notNull()).notNull()),
                                resultSpec(
                                        map(
                                                $("f0"),
                                                $("f1").cast(BIGINT().notNull()),
                                                $("f2"),
                                                $("f3").cast(BIGINT().notNull())),
                                        "MAP[f0, CAST(f1 AS BIGINT), f2, CAST(f3 AS BIGINT)]",
                                        CollectionUtil.map(entry(1, 2L), entry(3, 4L)),
                                        DataTypes.MAP(INT().notNull(), BIGINT().notNull())
                                                .notNull()),
                                resultSpec(
                                        map($("f6"), $("f6")),
                                        "MAP[f6, f6]",
                                        Collections.singletonMap(true, true),
                                        DataTypes.MAP(BOOLEAN().notNull(), BOOLEAN().notNull())
                                                .notNull()),
                                resultSpec(
                                        map(
                                                $("f0"),
                                                $("f1").cast(DOUBLE().notNull()),
                                                $("f2"),
                                                $("f3").cast(FLOAT().notNull())),
                                        "MAP[f0, CAST(f1 AS DOUBLE), f2, CAST(f3 AS FLOAT)]",
                                        CollectionUtil.map(entry(1, 2d), entry(3, 4.0)),
                                        DataTypes.MAP(INT().notNull(), DOUBLE().notNull())
                                                .notNull()),
                                resultSpec(
                                        map($("f4"), $("f5")),
                                        "MAP[f4, f5]",
                                        Collections.singletonMap(
                                                new BigDecimal("1.2345"), new BigDecimal("1.2346")),
                                        DataTypes.MAP(
                                                        DECIMAL(10, 4).notNull(),
                                                        DECIMAL(10, 4).notNull())
                                                .notNull()),
                                // nested maps as both key and value
                                resultSpec(
                                        map(map($("f0"), $("f1")), map($("f2"), $("f3"))),
                                        "MAP[MAP[f0, f1], MAP[f2, f3]]",
                                        Collections.singletonMap(
                                                Collections.singletonMap(1, 2),
                                                Collections.singletonMap(3, 4)),
                                        DataTypes.MAP(
                                                        MAP(
                                                                        DataTypes.INT().notNull(),
                                                                        DataTypes.INT().notNull())
                                                                .notNull(),
                                                        MAP(
                                                                        DataTypes.INT().notNull(),
                                                                        DataTypes.INT().notNull())
                                                                .notNull())
                                                .notNull())),
                TestSetSpec.forFunction(BuiltInFunctionDefinitions.MAP)
                        .onFieldsWithData(
                                TEST_DATE_1,
                                TEST_DATE_2,
                                TEST_TIME_1,
                                TEST_TIME_2,
                                TEST_DATE_TIME_1,
                                TEST_DATE_TIME_2)
                        .andDataTypes(
                                DATE().notNull(),
                                DATE().notNull(),
                                TIME().notNull(),
                                TIME().notNull(),
                                TIMESTAMP().notNull(),
                                TIMESTAMP().notNull())
                        .testResult(
                                resultSpec(
                                        map($("f0"), $("f2"), $("f1"), $("f3")),
                                        "MAP[f0, f2, f1, f3]",
                                        CollectionUtil.map(
                                                entry(TEST_DATE_1, TEST_TIME_1),
                                                entry(TEST_DATE_2, TEST_TIME_2)),
                                        DataTypes.MAP(DATE().notNull(), TIME().notNull())
                                                .notNull()),
                                resultSpec(
                                        map($("f2"), $("f4"), $("f3"), $("f5")),
                                        "MAP[f2, f4, f3, f5]",
                                        CollectionUtil.map(
                                                entry(TEST_TIME_1, TEST_DATE_TIME_1),
                                                entry(TEST_TIME_2, TEST_DATE_TIME_2)),
                                        DataTypes.MAP(TIME().notNull(), TIMESTAMP().notNull())
                                                .notNull())),
                // a NULL interval value makes the inferred value type nullable
                TestSetSpec.forFunction(BuiltInFunctionDefinitions.MAP)
                        .onFieldsWithData(A, B, INTERVAL_1, INTERVAL_NULL)
                        .andDataTypes(
                                STRING().notNull(),
                                STRING().notNull(),
                                INTERVAL(MONTH()),
                                INTERVAL(MONTH()).nullable())
                        .testResult(
                                resultSpec(
                                        map($("f0"), $("f2"), $("f1"), $("f3")),
                                        "MAP[f0, f2, f1, f3]",
                                        CollectionUtil.map(
                                                entry(A, Period.ofMonths(INTERVAL_1)),
                                                entry(B, INTERVAL_NULL)),
                                        DataTypes.MAP(
                                                        STRING().notNull(),
                                                        INTERVAL(MONTH()).nullable())
                                                .notNull())));
    }
    /**
     * Specs for MAP_KEYS: rejects non-map arguments, and returns the key array for maps with
     * nullable boolean keys, int keys, and array-typed keys.
     */
    private Stream<TestSetSpec> mapKeysTestCases() {
        return Stream.of(
                TestSetSpec.forFunction(BuiltInFunctionDefinitions.MAP_KEYS)
                        .onFieldsWithData(
                                null,
                                "item",
                                Collections.singletonMap(1, "value"),
                                Collections.singletonMap(new Integer[] {1, 2}, "value"))
                        .andDataTypes(
                                DataTypes.BOOLEAN().nullable(),
                                DataTypes.STRING(),
                                DataTypes.MAP(DataTypes.INT(), DataTypes.STRING()),
                                DataTypes.MAP(DataTypes.ARRAY(DataTypes.INT()), DataTypes.STRING()))
                        // MAP_KEYS is unary; two scalar arguments must be rejected
                        .testTableApiValidationError(
                                call("MAP_KEYS", $("f0"), $("f1")),
                                "Invalid function call:\nMAP_KEYS(BOOLEAN, STRING)")
                        .testResult(
                                map(
                                                $("f0").cast(DataTypes.BOOLEAN()),
                                                $("f1").cast(DataTypes.STRING()))
                                        .mapKeys(),
                                "MAP_KEYS(MAP[CAST(f0 AS BOOLEAN), CAST(f1 AS STRING)])",
                                new Boolean[] {null},
                                DataTypes.ARRAY(DataTypes.BOOLEAN()).notNull())
                        .testResult(
                                $("f2").mapKeys(),
                                "MAP_KEYS(f2)",
                                new Integer[] {1},
                                DataTypes.ARRAY(DataTypes.INT()))
                        .testResult(
                                $("f3").mapKeys(),
                                "MAP_KEYS(f3)",
                                new Integer[][] {new Integer[] {1, 2}},
                                DataTypes.ARRAY(DataTypes.ARRAY(DataTypes.INT()))));
    }
    /**
     * Specs for MAP_VALUES: rejects non-map arguments, and returns the value array for maps with
     * string, nullable boolean, and nested-map values.
     */
    private Stream<TestSetSpec> mapValuesTestCases() {
        return Stream.of(
                TestSetSpec.forFunction(BuiltInFunctionDefinitions.MAP_VALUES)
                        .onFieldsWithData(
                                null,
                                "item",
                                Collections.singletonMap(1, "value1"),
                                Collections.singletonMap(
                                        3, Collections.singletonMap(true, "value2")))
                        .andDataTypes(
                                DataTypes.BOOLEAN().nullable(),
                                DataTypes.STRING(),
                                DataTypes.MAP(DataTypes.INT(), DataTypes.STRING()),
                                DataTypes.MAP(
                                        DataTypes.INT(),
                                        DataTypes.MAP(DataTypes.BOOLEAN(), DataTypes.STRING())))
                        // MAP_VALUES is unary; two scalar arguments must be rejected
                        .testTableApiValidationError(
                                call("MAP_VALUES", $("f0"), $("f1")),
                                "Invalid function call:\nMAP_VALUES(BOOLEAN, STRING)")
                        .testResult(
                                map(
                                                $("f1").cast(DataTypes.STRING()),
                                                $("f0").cast(DataTypes.BOOLEAN()))
                                        .mapValues(),
                                "MAP_VALUES(MAP[CAST(f1 AS STRING), CAST(f0 AS BOOLEAN)])",
                                new Boolean[] {null},
                                DataTypes.ARRAY(DataTypes.BOOLEAN()).notNull())
                        .testResult(
                                $("f2").mapValues(),
                                "MAP_VALUES(f2)",
                                new String[] {"value1"},
                                DataTypes.ARRAY(DataTypes.STRING()))
                        .testResult(
                                $("f3").mapValues(),
                                "MAP_VALUES(f3)",
                                new Map[] {Collections.singletonMap(true, "value2")},
                                DataTypes.ARRAY(
                                        DataTypes.MAP(DataTypes.BOOLEAN(), DataTypes.STRING()))));
    }
    /**
     * Specs for MAP_ENTRIES: rejects non-map arguments, and converts each map into an array of
     * ROW(key, value) entries, including nested-map values.
     */
    private Stream<TestSetSpec> mapEntriesTestCases() {
        return Stream.of(
                TestSetSpec.forFunction(BuiltInFunctionDefinitions.MAP_ENTRIES)
                        .onFieldsWithData(
                                null,
                                "item",
                                Collections.singletonMap(1, "value1"),
                                Collections.singletonMap(
                                        3, Collections.singletonMap(true, "value2")))
                        .andDataTypes(
                                DataTypes.BOOLEAN().nullable(),
                                DataTypes.STRING(),
                                DataTypes.MAP(DataTypes.INT(), DataTypes.STRING()),
                                DataTypes.MAP(
                                        DataTypes.INT(),
                                        DataTypes.MAP(DataTypes.BOOLEAN(), DataTypes.STRING())))
                        // MAP_ENTRIES is unary; two scalar arguments must be rejected
                        .testTableApiValidationError(
                                call("MAP_ENTRIES", $("f0"), $("f1")),
                                "Invalid function call:\nMAP_ENTRIES(BOOLEAN, STRING)")
                        .testResult(
                                map(
                                                $("f0").cast(DataTypes.BOOLEAN()),
                                                $("f1").cast(DataTypes.STRING()))
                                        .mapEntries(),
                                "MAP_ENTRIES(MAP[CAST(f0 AS BOOLEAN), CAST(f1 AS STRING)])",
                                new Row[] {Row.of(null, "item")},
                                DataTypes.ARRAY(
                                                DataTypes.ROW(
                                                        DataTypes.FIELD("key", DataTypes.BOOLEAN()),
                                                        DataTypes.FIELD(
                                                                "value", DataTypes.STRING())))
                                        .notNull())
                        .testResult(
                                $("f2").mapEntries(),
                                "MAP_ENTRIES(f2)",
                                new Row[] {Row.of(1, "value1")},
                                DataTypes.ARRAY(
                                        DataTypes.ROW(
                                                DataTypes.FIELD("key", DataTypes.INT()),
                                                DataTypes.FIELD("value", DataTypes.STRING()))))
                        .testResult(
                                $("f3").mapEntries(),
                                "MAP_ENTRIES(f3)",
                                new Row[] {Row.of(3, Collections.singletonMap(true, "value2"))},
                                DataTypes.ARRAY(
                                        DataTypes.ROW(
                                                DataTypes.FIELD("key", DataTypes.INT()),
                                                DataTypes.FIELD(
                                                        "value",
                                                        DataTypes.MAP(
                                                                DataTypes.BOOLEAN(),
                                                                DataTypes.STRING()))))));
    }
    /**
     * Specs for MAP_FROM_ARRAYS: runtime errors on mismatched key/value array lengths, NULL
     * propagation for NULL inputs, and zipping of key/value arrays (including array values).
     */
    private Stream<TestSetSpec> mapFromArraysTestCases() {
        return Stream.of(
                TestSetSpec.forFunction(BuiltInFunctionDefinitions.MAP_FROM_ARRAYS, "Invalid input")
                        .onFieldsWithData(null, null, new Integer[] {1}, new Integer[] {1, 2})
                        .andDataTypes(
                                DataTypes.ARRAY(DataTypes.BOOLEAN()),
                                DataTypes.ARRAY(DataTypes.STRING()),
                                DataTypes.ARRAY(DataTypes.INT()),
                                DataTypes.ARRAY(DataTypes.INT()))
                        // mismatched lengths are rejected at runtime, not at validation time
                        .testTableApiRuntimeError(
                                mapFromArrays($("f2"), $("f3")),
                                "Invalid function MAP_FROM_ARRAYS call:\n"
                                        + "The length of the keys array 1 is not equal to the length of the values array 2")
                        .testSqlRuntimeError(
                                "MAP_FROM_ARRAYS(array[1, 2, 3], array[1, 2])",
                                "Invalid function MAP_FROM_ARRAYS call:\n"
                                        + "The length of the keys array 3 is not equal to the length of the values array 2")
                        .testResult(
                                mapFromArrays($("f0"), $("f1")),
                                "MAP_FROM_ARRAYS(f0, f1)",
                                null,
                                DataTypes.MAP(DataTypes.BOOLEAN(), DataTypes.STRING())),
                TestSetSpec.forFunction(BuiltInFunctionDefinitions.MAP_FROM_ARRAYS)
                        .onFieldsWithData(
                                new Integer[] {1, 2},
                                new String[] {"one", "two"},
                                new Integer[][] {new Integer[] {1, 2}, new Integer[] {3, 4}})
                        .andDataTypes(
                                DataTypes.ARRAY(DataTypes.INT()),
                                DataTypes.ARRAY(DataTypes.STRING()),
                                DataTypes.ARRAY(DataTypes.ARRAY(DataTypes.INT())))
                        .testResult(
                                mapFromArrays($("f0"), $("f1")),
                                "MAP_FROM_ARRAYS(f0, f1)",
                                CollectionUtil.map(entry(1, "one"), entry(2, "two")),
                                DataTypes.MAP(DataTypes.INT(), DataTypes.STRING()))
                        .testTableApiResult(
                                mapFromArrays($("f1"), $("f2")),
                                CollectionUtil.map(
                                        entry("one", new Integer[] {1, 2}),
                                        entry("two", new Integer[] {3, 4})),
                                DataTypes.MAP(
                                        DataTypes.STRING(), DataTypes.ARRAY(DataTypes.INT()))));
    }
    /**
     * Test specs for the MAP_UNION built-in function.
     *
     * <p>Covers: null-key entries, null map arguments (result is null), duplicate-key
     * semantics (entries from later maps overwrite earlier ones), varargs with more
     * than two maps, and validation errors for incompatible argument types.
     */
    private Stream<TestSetSpec> mapUnionTestCases() {
        return Stream.of(
                TestSetSpec.forFunction(BuiltInFunctionDefinitions.MAP_UNION)
                        .onFieldsWithData(
                                null,
                                "item",
                                CollectionUtil.map(
                                        entry("one", new Integer[] {1, 2}),
                                        entry("two", new Integer[] {3, 4})),
                                CollectionUtil.map(
                                        entry("one", new Integer[] {2, 2}),
                                        entry("two", new Integer[] {8, 4})),
                                CollectionUtil.map(
                                        entry(2, new Integer[] {1, 2}),
                                        entry(7, new Integer[] {3, 4})),
                                CollectionUtil.map(entry("one", 2), entry("two", 5)),
                                new Integer[] {1, 2, 3, 4, 5, null},
                                new String[] {"1", "3", "5", "7", "9", null},
                                null,
                                CollectionUtil.map(entry(1, 2)),
                                CollectionUtil.map(
                                        entry(1, 3),
                                        entry(2, 4),
                                        entry(lit(null, DataTypes.INT()), 3)),
                                lit(null, DataTypes.MAP(DataTypes.INT(), DataTypes.INT())))
                        .andDataTypes(
                                DataTypes.BOOLEAN().nullable(),
                                DataTypes.STRING(),
                                DataTypes.MAP(DataTypes.STRING(), DataTypes.ARRAY(DataTypes.INT())),
                                DataTypes.MAP(DataTypes.STRING(), DataTypes.ARRAY(DataTypes.INT())),
                                DataTypes.MAP(DataTypes.INT(), DataTypes.ARRAY(DataTypes.INT())),
                                DataTypes.MAP(DataTypes.STRING(), DataTypes.INT()),
                                DataTypes.ARRAY(DataTypes.INT()),
                                DataTypes.ARRAY(DataTypes.STRING()),
                                DataTypes.MAP(DataTypes.INT(), DataTypes.INT()),
                                DataTypes.MAP(DataTypes.INT(), DataTypes.INT()),
                                DataTypes.MAP(DataTypes.INT(), DataTypes.INT()),
                                DataTypes.MAP(DataTypes.INT(), DataTypes.INT()))
                        // A null key in the right-hand map replaces the null-key entry on the left.
                        .testResult(
                                $("f10").mapUnion(
                                                CollectionUtil.map(
                                                        entry(lit(null, DataTypes.INT()), 8))),
                                "MAP_UNION(f10, MAP[CAST(NULL AS INT), 8])",
                                CollectionUtil.map(entry(1, 3), entry(2, 4), entry(null, 8)),
                                DataTypes.MAP(DataTypes.INT(), DataTypes.INT()))
                        .testResult(
                                $("f9").mapUnion(
                                                CollectionUtil.map(
                                                        entry(lit(null, DataTypes.INT()), 3))),
                                "MAP_UNION(f9, MAP[CAST(NULL AS INT), 3])",
                                CollectionUtil.map(entry(null, 3), entry(1, 2)),
                                DataTypes.MAP(DataTypes.INT(), DataTypes.INT()))
                        // Any null map argument makes the whole union null.
                        .testResult(
                                $("f8").mapUnion(
                                                lit(
                                                        null,
                                                        DataTypes.MAP(
                                                                DataTypes.INT(), DataTypes.INT()))),
                                "MAP_UNION(f8, CAST(NULL AS MAP<INT, INT>))",
                                null,
                                DataTypes.MAP(DataTypes.INT(), DataTypes.INT()))
                        .testResult(
                                $("f9").mapUnion(
                                                lit(
                                                        null,
                                                        DataTypes.MAP(
                                                                DataTypes.INT(), DataTypes.INT()))),
                                "MAP_UNION(f9, CAST(NULL AS MAP<INT, INT>))",
                                null,
                                DataTypes.MAP(DataTypes.INT(), DataTypes.INT()))
                        .testResult(
                                $("f11").mapUnion(CollectionUtil.map(entry(1, 2))),
                                "MAP_UNION(f11, MAP[1, 2])",
                                null,
                                DataTypes.MAP(DataTypes.INT(), DataTypes.INT()))
                        .testResult(
                                $("f2").mapUnion(
                                                CollectionUtil.map(
                                                        entry("one", new Integer[] {2, 2}),
                                                        entry("two", new Integer[] {8, 4}),
                                                        entry("three", new Integer[] {1, 2}))),
                                "MAP_UNION(f2, MAP['one', ARRAY[2,2], 'two', ARRAY[8, 4], 'three', ARRAY[1, 2]])",
                                CollectionUtil.map(
                                        entry("one", new Integer[] {2, 2}),
                                        entry("two", new Integer[] {8, 4}),
                                        entry("three", new Integer[] {1, 2})),
                                DataTypes.MAP(DataTypes.STRING(), DataTypes.ARRAY(DataTypes.INT())))
                        // Varargs: with three maps, the right-most map wins on duplicate keys
                        // ("one" ends up as ARRAY[2, 9]).
                        .testResult(
                                $("f2").mapUnion(
                                                CollectionUtil.map(
                                                        entry("one", new Integer[] {2, 2}),
                                                        entry("two", new Integer[] {8, 4}),
                                                        entry("three", new Integer[] {1, 2})),
                                                CollectionUtil.map(
                                                        entry("one", new Integer[] {2, 9}),
                                                        entry("four", new Integer[] {8, 4}),
                                                        entry("five", new Integer[] {1, 2}))),
                                "MAP_UNION(f2, MAP['one', ARRAY[2,2], 'two', ARRAY[8, 4], 'three', ARRAY[1, 2]], MAP['one', ARRAY[2,9], 'four', ARRAY[8, 4], 'five', ARRAY[1, 2]])",
                                CollectionUtil.map(
                                        entry("one", new Integer[] {2, 9}),
                                        entry("two", new Integer[] {8, 4}),
                                        entry("three", new Integer[] {1, 2}),
                                        entry("four", new Integer[] {8, 4}),
                                        entry("five", new Integer[] {1, 2})),
                                DataTypes.MAP(DataTypes.STRING(), DataTypes.ARRAY(DataTypes.INT())))
                        .testResult(
                                $("f4").mapUnion(
                                                CollectionUtil.map(
                                                        entry(1, new Integer[] {2, 2}),
                                                        entry(2, new Integer[] {8, 4}),
                                                        entry(3, new Integer[] {1, 2}))),
                                "MAP_UNION(f4, MAP[1, ARRAY[2,2], 2, ARRAY[8, 4], 3, ARRAY[1, 2]])",
                                CollectionUtil.map(
                                        entry(1, new Integer[] {2, 2}),
                                        entry(2, new Integer[] {8, 4}),
                                        entry(3, new Integer[] {1, 2}),
                                        entry(7, new Integer[] {3, 4})),
                                DataTypes.MAP(DataTypes.INT(), DataTypes.ARRAY(DataTypes.INT())))
                        // Validation errors: key types without a common supertype are rejected.
                        .testTableApiValidationError(
                                $("f2").mapUnion(
                                                CollectionUtil.map(
                                                        entry(1, new Integer[] {2, 2}),
                                                        entry(2, new Integer[] {8, 4}),
                                                        entry(3, new Integer[] {1, 2}))),
                                "Invalid function call:\n"
                                        + "MAP_UNION(MAP<STRING, ARRAY<INT>>, MAP<INT NOT NULL, ARRAY<INT NOT NULL> NOT NULL> NOT NULL)")
                        .testSqlValidationError(
                                "MAP_UNION(f2, MAP[1, ARRAY[2,2], 2, ARRAY[8, 4], 3, ARRAY[1, 2]])",
                                "SQL validation failed. Invalid function call:\n"
                                        + "MAP_UNION(MAP<STRING, ARRAY<INT>>, MAP<INT NOT NULL, ARRAY<INT NOT NULL> NOT NULL> NOT NULL)")
                        .testTableApiValidationError(
                                $("f0").mapUnion(
                                                CollectionUtil.map(
                                                        entry(1, new Integer[] {2, 2}),
                                                        entry(2, new Integer[] {8, 4}),
                                                        entry(3, new Integer[] {1, 2}))),
                                "Invalid function call:\n"
                                        + "MAP_UNION(BOOLEAN, MAP<INT NOT NULL, ARRAY<INT NOT NULL> NOT NULL> NOT NULL)")
                        .testSqlValidationError(
                                "MAP_UNION(f0, MAP[1, ARRAY[2,2], 2, ARRAY[8, 4], 3, ARRAY[1, 2]])",
                                "SQL validation failed. Invalid function call:\n"
                                        + "MAP_UNION(BOOLEAN, MAP<INT NOT NULL, ARRAY<INT NOT NULL> NOT NULL> NOT NULL)")
                        .testTableApiValidationError(
                                $("f1").mapUnion(
                                                CollectionUtil.map(
                                                        entry(1, new Integer[] {2, 2}),
                                                        entry(2, new Integer[] {8, 4}),
                                                        entry(3, new Integer[] {1, 2}))),
                                "Invalid function call:\n"
                                        + "MAP_UNION(STRING, MAP<INT NOT NULL, ARRAY<INT NOT NULL> NOT NULL> NOT NULL)")
                        .testSqlValidationError(
                                "MAP_UNION(f1, MAP[1, ARRAY[2,2], 2, ARRAY[8, 4], 3, ARRAY[1, 2]])",
                                "SQL validation failed. Invalid function call:\n"
                                        + "MAP_UNION(STRING, MAP<INT NOT NULL, ARRAY<INT NOT NULL> NOT NULL> NOT NULL)")
                        .testTableApiValidationError(
                                $("f2").mapUnion(
                                                CollectionUtil.map(
                                                        entry("1", 1),
                                                        entry("2", 2),
                                                        entry("3", 3))),
                                "Invalid input arguments. Expected signatures are:\n"
                                        + "MAP_UNION(<COMMON>, <COMMON>...)")
                        .testSqlValidationError(
                                "MAP_UNION(f2, MAP['1', 1, '2', 2, '3', 3])",
                                "Invalid input arguments. Expected signatures are:\n"
                                        + "MAP_UNION(<COMMON>, <COMMON>...)")
                        .testTableApiValidationError(
                                $("f5").mapUnion(new String[] {"123"}),
                                "Invalid input arguments. Expected signatures are:\n"
                                        + "MAP_UNION(<COMMON>, <COMMON>...)")
                        .testSqlValidationError(
                                "MAP_UNION(f5, ARRAY['123'])",
                                "Invalid input arguments. Expected signatures are:\n"
                                        + "MAP_UNION(<COMMON>, <COMMON>...)")
                        .testTableApiValidationError(
                                $("f6").mapUnion(
                                                CollectionUtil.map(
                                                        entry("1", 1),
                                                        entry("2", 2),
                                                        entry("3", 3))),
                                "Invalid input arguments. Expected signatures are:\n"
                                        + "MAP_UNION(<COMMON>, <COMMON>...)")
                        .testSqlValidationError(
                                "MAP_UNION(f6, MAP['1', 1, '2', 2, '3', 3])",
                                "Invalid input arguments. Expected signatures are:\n"
                                        + "MAP_UNION(<COMMON>, <COMMON>...)")
                        .testTableApiValidationError(
                                $("f7").mapUnion(new Integer[] {1, 2, 3, 4}),
                                "Invalid input arguments. Expected signatures are:\n"
                                        + "MAP_UNION(<COMMON>, <COMMON>...)")
                        .testSqlValidationError(
                                "MAP_UNION(f7, ARRAY[1, 2, 3, 4])",
                                "Invalid input arguments. Expected signatures are:\n"
                                        + "MAP_UNION(<COMMON>, <COMMON>...)"));
    }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.waveprotocol.wave.model.util;
import org.waveprotocol.wave.model.util.ReadableStringMap.ProcV;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Set;
/**
* Utilities related to StringMap, StringSet, and CollectionFactory.
*
* @author ohler@google.com (Christian Ohler)
*/
public class CollectionUtils {
  // Non-instantiable: this class only provides static utilities.
  private CollectionUtils() {
  }
  /** Data domain over string sets; composition is the set union of base and changes. */
  public static final DataDomain<ReadableStringSet, StringSet> STRING_SET_DOMAIN =
      new DataDomain<ReadableStringSet, StringSet>() {
        @Override
        public void compose(StringSet target, ReadableStringSet changes, ReadableStringSet base) {
          target.clear();
          target.addAll(base);
          target.addAll(changes);
        }

        @Override
        public StringSet empty() {
          return createStringSet();
        }

        @Override
        public ReadableStringSet readOnlyView(StringSet modifiable) {
          // NOTE(review): returns the modifiable instance itself (no defensive
          // wrapper), unlike HASH_SET_DOMAIN below — callers must not cast back.
          return modifiable;
        }
      };

  /**
   * Data domain over string maps; composition overlays {@code changes} on top of
   * {@code base} — entries in {@code changes} win because putAll(changes) runs last.
   */
  public static final DataDomain<ReadableStringMap<Object>, StringMap<Object>> STRING_MAP_DOMAIN =
      new DataDomain<ReadableStringMap<Object>, StringMap<Object>>() {
        @Override
        public void compose(StringMap<Object> target, ReadableStringMap<Object> changes,
            ReadableStringMap<Object> base) {
          target.clear();
          target.putAll(base);
          target.putAll(changes);
        }

        @Override
        public StringMap<Object> empty() {
          return createStringMap();
        }

        @Override
        public ReadableStringMap<Object> readOnlyView(StringMap<Object> modifiable) {
          return modifiable;
        }
      };

  // Typed view of the shared STRING_MAP_DOMAIN singleton; the raw cast is safe
  // because the domain never creates values of the key/value types itself.
  @SuppressWarnings("unchecked")
  public static <T> DataDomain<StringMap<T>, StringMap<T>> stringMapDomain() {
    return (DataDomain) STRING_MAP_DOMAIN;
  }

  // Typed view of the shared HASH_SET_DOMAIN singleton (same cast rationale).
  @SuppressWarnings("unchecked")
  public static <T> DataDomain<Set<T>, Set<T>> hashSetDomain() {
    return (DataDomain) HASH_SET_DOMAIN;
  }

  /** Data domain over java.util sets; composition is set union (addAll order is irrelevant). */
  public static final DataDomain<Set<Object>, Set<Object>> HASH_SET_DOMAIN =
      new DataDomain<Set<Object>, Set<Object>>() {
        @Override
        public void compose(Set<Object> target, Set<Object> changes, Set<Object> base) {
          target.clear();
          target.addAll(changes);
          target.addAll(base);
        }

        @Override
        public Set<Object> empty() {
          return new HashSet<Object>();
        }

        @Override
        public Set<Object> readOnlyView(Set<Object> modifiable) {
          return Collections.unmodifiableSet(modifiable);
        }
      };
/**
* An adapter that turns a java.util.Map<String, V> into a StringMap<V>.
*
* @author ohler@google.com (Christian Ohler)
*
* @param <V> type of values in the map
*/
private static final class StringMapAdapter<V> implements StringMap<V> {
private final Map<String, V> backend;
private StringMapAdapter(Map<String, V> backend) {
Preconditions.checkNotNull(backend, "Attempt to adapt a null map");
this.backend = backend;
}
@Override
public void putAll(ReadableStringMap<V> pairsToAdd) {
// TODO(ohler): check instanceof here and implement a fallback.
backend.putAll(((StringMapAdapter<V>) pairsToAdd).backend);
}
@Override
public void putAll(Map<String, V> sourceMap) {
Preconditions.checkArgument(!sourceMap.containsKey(null),
"Source map must not contain a null key");
backend.putAll(sourceMap);
}
@Override
public void clear() {
backend.clear();
}
@Override
public void put(String key, V value) {
Preconditions.checkNotNull(key, "StringMap cannot contain null keys");
backend.put(key, value);
}
@Override
public void remove(String key) {
Preconditions.checkNotNull(key, "StringMap cannot contain null keys");
backend.remove(key);
}
@Override
public boolean containsKey(String key) {
Preconditions.checkNotNull(key, "StringMap cannot contain null keys");
return backend.containsKey(key);
}
@Override
public V getExisting(String key) {
Preconditions.checkNotNull(key, "StringMap cannot contain null keys");
if (!backend.containsKey(key)) {
// Not using Preconditions.checkState to avoid unecessary string concatenation
throw new IllegalStateException("getExisting: Key '" + key + "' is not in map");
}
return backend.get(key);
}
@Override
public V get(String key) {
Preconditions.checkNotNull(key, "StringMap cannot contain null keys");
return backend.get(key);
}
@Override
public V get(String key, V defaultValue) {
Preconditions.checkNotNull(key, "StringMap cannot contain null keys");
if (backend.containsKey(key)) {
return backend.get(key);
} else {
return defaultValue;
}
}
@Override
public boolean isEmpty() {
return backend.isEmpty();
}
@Override
public void each(ProcV<? super V> callback) {
for (Map.Entry<String, V> entry : backend.entrySet()) {
callback.apply(entry.getKey(), entry.getValue());
}
}
@Override
public void filter(EntryFilter<? super V> filter) {
for (Iterator<Map.Entry<String, V>> iterator = backend.entrySet().iterator();
iterator.hasNext();) {
Map.Entry<String, V> entry = iterator.next();
if (filter.apply(entry.getKey(), entry.getValue())) {
// entry stays
} else {
iterator.remove();
}
}
}
@Override
public int countEntries() {
return backend.size();
}
@Override
public String someKey() {
return isEmpty() ? null : backend.keySet().iterator().next();
}
@Override
public ReadableStringSet keySet() {
return new StringSetAdapter(backend.keySet());
}
@Override
public String toString() {
return backend.toString();
}
// NOTE(patcoleman): equals() and hashCode() should not be implemented in this adaptor, as
// they are unsupported in the javascript collections.
}
  /**
   * An adapter that turns a java.util.Map<Double, V> into a NumberMap<V>.
   *
   * @param <V> type of values in the map
   */
  private static final class NumberMapAdapter<V> implements NumberMap<V> {
    private final Map<Double, V> backend;

    private NumberMapAdapter(Map<Double, V> backend) {
      Preconditions.checkNotNull(backend, "Attempt to adapt a null map");
      this.backend = backend;
    }

    @Override
    public void putAll(ReadableNumberMap<V> pairsToAdd) {
      // TODO(ohler): check instanceof here and implement a fallback.
      // NOTE(review): throws ClassCastException for any ReadableNumberMap that
      // is not a NumberMapAdapter — the TODO above is still open.
      backend.putAll(((NumberMapAdapter<V>) pairsToAdd).backend);
    }

    @Override
    public void putAll(Map<Double, V> sourceMap) {
      backend.putAll(sourceMap);
    }

    @Override
    public void clear() {
      backend.clear();
    }

    @Override
    public void put(double key, V value) {
      backend.put(key, value);
    }

    @Override
    public void remove(double key) {
      backend.remove(key);
    }

    @Override
    public boolean containsKey(double key) {
      return backend.containsKey(key);
    }

    @Override
    public V getExisting(double key) {
      // NOTE(review): unlike StringMapAdapter.getExisting, a missing key is only
      // caught when assertions are enabled — confirm whether this is intentional.
      assert backend.containsKey(key);
      return backend.get(key);
    }

    @Override
    public V get(double key) {
      return backend.get(key);
    }

    @Override
    public V get(double key, V defaultValue) {
      // containsKey (not null-check) so that a stored null value is honored.
      if (backend.containsKey(key)) {
        return backend.get(key);
      } else {
        return defaultValue;
      }
    }

    @Override
    public boolean isEmpty() {
      return backend.isEmpty();
    }

    @Override
    public void each(ProcV<V> callback) {
      for (Map.Entry<Double, V> entry : backend.entrySet()) {
        callback.apply(entry.getKey(), entry.getValue());
      }
    }

    @Override
    public void filter(EntryFilter<V> filter) {
      // Keep entries for which the filter returns true; drop the rest in place.
      for (Iterator<Map.Entry<Double, V>> iterator = backend.entrySet().iterator();
          iterator.hasNext();) {
        Map.Entry<Double, V> entry = iterator.next();
        if (filter.apply(entry.getKey(), entry.getValue())) {
          // entry stays
        } else {
          iterator.remove();
        }
      }
    }

    @Override
    public int countEntries() {
      return backend.size();
    }

    @Override
    public String toString() {
      return backend.toString();
    }

    // NOTE(patcoleman): equals() and hashCode() should not be implemented in this adaptor, as
    // they are unsupported in the javascript collections.
  }
  /**
   * An adapter that turns a java.util.Map<Integer, V> into an IntMap<V>.
   *
   * @param <V> type of values in the map
   */
  private static final class IntMapAdapter<V> implements IntMap<V> {
    private final Map<Integer, V> backend;

    private IntMapAdapter(Map<Integer, V> backend) {
      Preconditions.checkNotNull(backend, "Attempt to adapt a null map");
      this.backend = backend;
    }

    @Override
    public void putAll(ReadableIntMap<V> pairsToAdd) {
      // TODO(ohler): check instanceof here and implement a fallback.
      // NOTE(review): throws ClassCastException for any ReadableIntMap that is
      // not an IntMapAdapter — the TODO above is still open.
      backend.putAll(((IntMapAdapter<V>) pairsToAdd).backend);
    }

    @Override
    public void putAll(Map<Integer, V> sourceMap) {
      backend.putAll(sourceMap);
    }

    @Override
    public void clear() {
      backend.clear();
    }

    @Override
    public void put(int key, V value) {
      backend.put(key, value);
    }

    @Override
    public void remove(int key) {
      backend.remove(key);
    }

    @Override
    public boolean containsKey(int key) {
      return backend.containsKey(key);
    }

    @Override
    public V getExisting(int key) {
      // NOTE(review): missing keys are only caught when assertions are enabled,
      // unlike StringMapAdapter.getExisting — confirm whether this is intentional.
      assert backend.containsKey(key);
      return backend.get(key);
    }

    @Override
    public V get(int key) {
      return backend.get(key);
    }

    @Override
    public V get(int key, V defaultValue) {
      // containsKey (not null-check) so that a stored null value is honored.
      if (backend.containsKey(key)) {
        return backend.get(key);
      } else {
        return defaultValue;
      }
    }

    @Override
    public boolean isEmpty() {
      return backend.isEmpty();
    }

    @Override
    public void each(ProcV<V> callback) {
      for (Map.Entry<Integer, V> entry : backend.entrySet()) {
        callback.apply(entry.getKey(), entry.getValue());
      }
    }

    @Override
    public void filter(EntryFilter<V> filter) {
      // Keep entries for which the filter returns true; drop the rest in place.
      for (Iterator<Map.Entry<Integer, V>> iterator = backend.entrySet().iterator();
          iterator.hasNext();) {
        Map.Entry<Integer, V> entry = iterator.next();
        if (filter.apply(entry.getKey(), entry.getValue())) {
          // entry stays
        } else {
          iterator.remove();
        }
      }
    }

    @Override
    public int countEntries() {
      return backend.size();
    }

    @Override
    public String toString() {
      return backend.toString();
    }

    // NOTE(patcoleman): equals() and hashCode() should not be implemented in this adaptor, as
    // they are unsupported in the javascript collections.
  }
/**
* An adapter that turns a java.util.Set<String> into a StringSet.
*
* @author ohler@google.com (Christian Ohler)
*/
private static class StringSetAdapter implements StringSet {
private final Set<String> backend;
private StringSetAdapter(Set<String> backend) {
Preconditions.checkNotNull(backend, "Attempt to adapt a null set");
this.backend = backend;
}
@Override
public void add(String s) {
Preconditions.checkNotNull(s, "StringSet cannot contain null values");
backend.add(s);
}
@Override
public void clear() {
backend.clear();
}
@Override
public boolean contains(String s) {
Preconditions.checkNotNull(s, "StringSet cannot contain null values");
return backend.contains(s);
}
@Override
public void remove(String s) {
Preconditions.checkNotNull(s, "StringSet cannot contain null values");
backend.remove(s);
}
@Override
public boolean isEmpty() {
return backend.isEmpty();
}
@Override
public void each(ReadableStringSet.Proc callback) {
for (String s : backend) {
callback.apply(s);
}
}
@Override
public boolean isSubsetOf(Set<String> set) {
return set.containsAll(backend);
}
@Override
public boolean isSubsetOf(final ReadableStringSet other) {
for (String s : backend) {
if (!other.contains(s)) {
return false;
}
}
return true;
}
@Override
public void addAll(ReadableStringSet set) {
backend.addAll(((StringSetAdapter) set).backend);
}
@Override
public void removeAll(ReadableStringSet set) {
backend.removeAll(((StringSetAdapter) set).backend);
}
@Override
public void filter(StringPredicate filter) {
for (Iterator<String> iterator = backend.iterator(); iterator.hasNext();) {
String x = iterator.next();
if (filter.apply(x)) {
// entry stays
} else {
iterator.remove();
}
}
}
@Override
public String someElement() {
return isEmpty() ? null : backend.iterator().next();
}
@Override
public String toString() {
return backend.toString();
}
@Override
public int countEntries() {
return backend.size();
}
}
  /**
   * An adapter that wraps a {@link IdentityHashMap}, presenting it as an
   * {@link IdentitySet}.
   *
   * <p>Elements are compared by reference (==), per IdentityHashMap. Boxed
   * primitives and String are explicitly rejected (see {@link #add}).
   */
  private static class IdentitySetAdapter<T> implements IdentitySet<T> {
    // Backing map; each element is stored as both key and value.
    private final Map<T, T> backend = new IdentityHashMap<T, T>();

    private IdentitySetAdapter() {
    }

    @Override
    public void add(T x) {
      Preconditions.checkNotNull(x, "IdentitySet cannot contain null values");
      // Note: Boxed primitives, and String, are disallowed. There are special
      // purpose maps for those key types, and the equality semantics between
      // the boxed primitives of Javascript and Java are dubious at best.
      if (x instanceof String || x instanceof Integer || x instanceof Double || x instanceof Long
          || x instanceof Boolean) {
        throw new UnsupportedOperationException(
            "Should NOT use boxed primitives with IdentitySet");
      }
      backend.put(x, x);
    }

    @Override
    public void clear() {
      backend.clear();
    }

    @Override
    public boolean contains(T s) {
      Preconditions.checkNotNull(s, "IdentitySet cannot contain null values");
      return backend.containsKey(s);
    }

    @Override
    public void remove(T s) {
      // NOTE(review): remove/contains do not reject boxed primitives the way
      // add does; such values can never be present, so the calls are no-ops.
      Preconditions.checkNotNull(s, "IdentitySet cannot contain null values");
      backend.remove(s);
    }

    @Override
    public boolean isEmpty() {
      return backend.isEmpty();
    }

    @Override
    public T someElement() {
      // Returns an arbitrary element, or null when empty.
      for (T e : backend.keySet()) {
        return e;
      }
      return null;
    }

    @Override
    public void each(Proc<? super T> procedure) {
      for (T s : backend.keySet()) {
        procedure.apply(s);
      }
    }

    @Override
    public String toString() {
      return backend.toString();
    }

    @Override
    public int countEntries() {
      return backend.size();
    }
  }
  /**
   * Adapts a java.util.Queue of boxed doubles to the primitive-typed
   * NumberPriorityQueue interface.
   */
  private static class NumberPriorityQueueAdapter implements NumberPriorityQueue {
    private final Queue<Double> queue;

    private NumberPriorityQueueAdapter(Queue<Double> queue) {
      this.queue = queue;
    }

    @Override
    public boolean offer(double e) {
      return queue.offer(e);
    }

    @Override
    public double peek() {
      // NOTE(review): queue.peek() returns null on an empty queue, so unboxing
      // throws NullPointerException here — callers must check size() first.
      return queue.peek();
    }

    @Override
    public double poll() {
      // NOTE(review): same empty-queue NullPointerException hazard as peek().
      return queue.poll();
    }

    @Override
    public int size() {
      return queue.size();
    }
  }
  /**
   * An adapter that wraps a java.util.IdentityHashMap<K, V> into an
   * IdentityMap<K, V>. Keys are compared by reference (==), per
   * {@link IdentityHashMap} — not by hashCode/equals.
   *
   * @param <K> type of keys in the map.
   * @param <V> type of values in the map
   */
  private static class IdentityHashMapAdapter<K, V> implements IdentityMap<K, V> {
    private final Map<K, V> backend = new IdentityHashMap<K, V>();

    private IdentityHashMapAdapter() {
    }

    @Override
    public V get(K key) {
      return backend.get(key);
    }

    @Override
    public boolean has(K key) {
      return backend.containsKey(key);
    }

    @Override
    public void put(K key, V value) {
      // Note: Boxed primitives, and String, are disallowed. See explanation in
      // IdentitySetAdapter.
      if (key instanceof String || key instanceof Integer || key instanceof Double
          || key instanceof Long || key instanceof Boolean) {
        throw new UnsupportedOperationException(
            "Should NOT use boxed primitives as key with identity map");
      }
      backend.put(key, value);
    }

    @Override
    public void remove(K key) {
      removeAndReturn(key);
    }

    @Override
    public V removeAndReturn(K key) {
      return backend.remove(key);
    }

    @Override
    public void clear() {
      backend.clear();
    }

    @Override
    public boolean isEmpty() {
      return backend.isEmpty();
    }

    @Override
    public void each(ProcV<? super K, ? super V> proc) {
      for (Map.Entry<K, V> entry : backend.entrySet()) {
        proc.apply(entry.getKey(), entry.getValue());
      }
    }

    @Override
    public <R> R reduce(R initial, Reduce<? super K, ? super V, R> proc) {
      // Left fold over the entries in iteration order.
      R reduction = initial;
      for (Map.Entry<K, V> entry : backend.entrySet()) {
        reduction = proc.apply(reduction, entry.getKey(), entry.getValue());
      }
      return reduction;
    }

    @Override
    public String toString() {
      return backend.toString();
    }

    @Override
    public int countEntries() {
      return backend.size();
    }

    // NOTE(patcoleman): equals() and hashCode() should not be implemented in this adaptor, as
    // they are unsupported in the javascript collections.
  }
  /**
   * An implementation of CollectionFactory based on java.util.HashSet and
   * java.util.HashMap.
   *
   * @author ohler@google.com (Christian Ohler)
   */
  private static class HashCollectionFactory implements CollectionFactory {
    @Override
    public <V> StringMap<V> createStringMap() {
      return CollectionUtils.adaptStringMap(new HashMap<String, V>());
    }

    @Override
    public <V> NumberMap<V> createNumberMap() {
      return CollectionUtils.adaptNumberMap(new HashMap<Double, V>());
    }

    @Override
    public <V> IntMap<V> createIntMap() {
      return CollectionUtils.adaptIntMap(new HashMap<Integer, V>());
    }

    @Override
    public StringSet createStringSet() {
      return CollectionUtils.adaptStringSet(new HashSet<String>());
    }

    @Override
    public <T> IdentitySet<T> createIdentitySet() {
      return new IdentitySetAdapter<T>();
    }

    @Override
    public <E> Queue<E> createQueue() {
      return new LinkedList<E>();
    }

    @Override
    public NumberPriorityQueue createPriorityQueue() {
      return CollectionUtils.adaptNumberPriorityQueue(new PriorityQueue<Double>());
    }

    @Override
    public <K, V> IdentityMap<K, V> createIdentityMap() {
      return new IdentityHashMapAdapter<K, V>();
    }
  }

  // Shared JVM-backed factory instance; also the initial default below.
  private static final HashCollectionFactory HASH_COLLECTION_FACTORY =
      new HashCollectionFactory();

  // Mutable on purpose: replaced via setDefaultCollectionFactory during client
  // initialization. NOTE(review): unsynchronized write — assumed to happen once
  // during single-threaded startup; confirm before relying on it elsewhere.
  private static CollectionFactory defaultCollectionFactory = HASH_COLLECTION_FACTORY;
/**
* Implements a persistently empty string map that throws exceptions on
* attempt to add keys.
*/
private static final class EmptyStringMap<V> implements StringMap<V> {
@Override
public void clear() {
// Success as the map is already empty.
}
@Override
public void filter(StringMap.EntryFilter<? super V> filter) {
}
@Override
public void put(String key, V value) {
throw new UnsupportedOperationException();
}
@Override
public void putAll(ReadableStringMap<V> pairsToAdd) {
throw new UnsupportedOperationException();
}
@Override
public void putAll(Map<String, V> sourceMap) {
throw new UnsupportedOperationException();
}
@Override
public void remove(String key) {
}
@Override
public boolean containsKey(String key) {
return false;
}
@Override
public int countEntries() {
return 0;
}
@Override
public void each(org.waveprotocol.wave.model.util.ReadableStringMap.ProcV<? super V> callback) {
}
@Override
public V get(String key, V defaultValue) {
return null;
}
@Override
public V get(String key) {
return null;
}
@Override
public V getExisting(String key) {
throw new UnsupportedOperationException();
}
@Override
public boolean isEmpty() {
return true;
}
@Override
public String someKey() {
return null;
}
@Override
public ReadableStringSet keySet() {
// TODO(danilatos/ohler): Implement an immutable EMPTY_SET
return CollectionUtils.createStringSet();
}
}
  // Shared immutable empty StringMap; handed out (with a cast) by emptyMap().
  private static final EmptyStringMap<Object> EMPTY_MAP = new EmptyStringMap<Object>();

  // Shared immutable empty IdentityMap; handed out by emptyIdentityMap().
  // NOTE(review): remove()/removeAndReturn() throw here, whereas
  // EmptyStringMap.remove is a no-op — confirm whether that asymmetry is intended.
  private static final IdentityMap<Object, Object> EMPTY = new IdentityMap<Object, Object>() {
    @Override
    public void clear() {
    }

    @Override
    public int countEntries() {
      return 0;
    }

    @Override
    public void each(ProcV<Object, Object> proc) {
    }

    @Override
    public Object get(Object key) {
      return null;
    }

    @Override
    public boolean has(Object key) {
      return false;
    }

    @Override
    public boolean isEmpty() {
      return true;
    }

    @Override
    public void put(Object key, Object value) {
      throw new UnsupportedOperationException();
    }

    @Override
    public <R> R reduce(R initial, Reduce<Object, Object, R> proc) {
      // Folding over zero entries yields the initial value unchanged.
      return initial;
    }

    @Override
    public void remove(Object key) {
      throw new UnsupportedOperationException();
    }

    @Override
    public Object removeAndReturn(Object key) {
      throw new UnsupportedOperationException();
    }
  };
//
// Plain old collections.
//
  /**
   * Creates an empty {@code HashSet}.
   */
  public static <E> HashSet<E> newHashSet() {
    return new HashSet<E>();
  }

  /**
   * Creates a {@code HashSet} instance containing the given elements.
   *
   * @param elements the elements that the set should contain
   * @return a newly created {@code HashSet} containing those elements.
   */
  @SuppressWarnings("unchecked")
  public static <E> HashSet<E> newHashSet(E... elements) {
    // Pre-size so the default 0.75 load factor never triggers a rehash.
    int capacity = Math.max((int) (elements.length / .75f) + 1, 16);
    HashSet<E> set = new HashSet<E>(capacity);
    Collections.addAll(set, elements);
    return set;
  }

  /**
   * Creates a {@code HashSet} instance containing the given elements.
   *
   * @param elements the elements that the set should contain
   * @return a newly created {@code HashSet} containing those elements.
   */
  public static <E> HashSet<E> newHashSet(Collection<? extends E> elements) {
    return new HashSet<E>(elements);
  }

  /**
   * Creates an empty immutable set.
   *
   * @return a newly created set containing those elements.
   */
  public static <E> Set<E> immutableSet() {
    // TODO(anorth): optimise to a truly immutable set.
    return Collections.unmodifiableSet(CollectionUtils.<E>newHashSet());
  }

  /**
   * Creates an immutable set containing the given elements.
   *
   * @param elements the elements that the set should contain
   * @return a newly created set containing those elements.
   */
  public static <E> Set<E> immutableSet(Collection<? extends E> elements) {
    // TODO(anorth): optimise to a truly immutable set.
    return Collections.unmodifiableSet(newHashSet(elements));
  }

  /**
   * Creates an immutable set containing the given elements.
   *
   * @param elements the elements that the set should contain
   * @return a newly created set containing those elements.
   */
  @SuppressWarnings("unchecked")
  public static <E> Set<E> immutableSet(E... elements) {
    // TODO(anorth): optimise to a truly immutable set.
    return Collections.unmodifiableSet(newHashSet(elements));
  }
  /** Creates an empty {@link HashMap}. */
  public static <K, V> HashMap<K, V> newHashMap() {
    return new HashMap<K, V>();
  }

  /**
   * Creates a {@link HashMap} containing the elements in the given map.
   */
  public static <K, V> HashMap<K, V> newHashMap(Map<? extends K, ? extends V> map) {
    return new HashMap<K, V>(map);
  }

  /** Creates a new immutable map with one entry. */
  public static <K, V> Map<K, V> immutableMap(K k1, V v1) {
    // TODO(anorth): optimise to a truly immutable map.
    return Collections.singletonMap(k1, v1);
  }

  /**
   * Creates a new immutable map with the given entries.
   * If {@code k1} equals {@code k2}, the second entry wins.
   */
  public static <K, V> Map<K, V> immutableMap(K k1, V v1, K k2, V v2) {
    Map<K, V> map = newHashMap();
    map.put(k1, v1);
    map.put(k2, v2);
    return Collections.unmodifiableMap(map);
  }
  /** Creates a new, empty linked list. */
  public static <T> LinkedList<T> newLinkedList() {
    return new LinkedList<T>();
  }

  /** Creates a new linked list containing elements provided by an iterable, in order. */
  public static <T> LinkedList<T> newLinkedList(Iterable<? extends T> elements) {
    LinkedList<T> list = newLinkedList();
    for (T e : elements) {
      list.add(e);
    }
    return list;
  }

  /** Creates a new linked list containing the provided elements, in order. */
  @SuppressWarnings("unchecked")
  public static <T> LinkedList<T> newLinkedList(T... elements) {
    return newLinkedList(Arrays.asList(elements));
  }

  /** Creates a new, empty array list. */
  public static <T> ArrayList<T> newArrayList() {
    return new ArrayList<T>();
  }

  /** Creates a new array list containing elements provided by an iterable, in order. */
  public static <T> ArrayList<T> newArrayList(Iterable<? extends T> elements) {
    ArrayList<T> list = newArrayList();
    for (T e : elements) {
      list.add(e);
    }
    return list;
  }

  /** Creates a new array list containing the provided elements, in order. */
  @SuppressWarnings("unchecked")
  public static <T> ArrayList<T> newArrayList(T... elements) {
    return newArrayList(Arrays.asList(elements));
  }
//
// String-based collections.
//
  /**
   * Sets the default collection factory.
   *
   * This is used in the GWT client initialization code to plug in the JSO-based
   * collection factory. There shouldn't be any need to call this from other
   * places.
   */
  public static void setDefaultCollectionFactory(CollectionFactory f) {
    // NOTE(review): unsynchronized write; assumed to run once during
    // single-threaded startup — confirm before calling concurrently.
    defaultCollectionFactory = f;
  }

  /**
   * Returns a CollectionFactory based on HashSet and HashMap from java.util.
   *
   * Note: getCollectionFactory() is probably a better choice.
   */
  public static CollectionFactory getHashCollectionFactory() {
    return HASH_COLLECTION_FACTORY;
  }

  /**
   * Returns the default CollectionFactory.
   */
  public static CollectionFactory getCollectionFactory() {
    return defaultCollectionFactory;
  }

  /**
   * Creates a new StringMap using the default collection factory.
   */
  public static <V> StringMap<V> createStringMap() {
    return CollectionUtils.getCollectionFactory().createStringMap();
  }
  /**
   * @return an immutable empty map object. Always reuses the same object, does
   *         not create new ones.
   */
  @SuppressWarnings("unchecked")
  public static <V> StringMap<V> emptyMap() {
    // Safe: the shared instance never holds any value, so V is unconstrained.
    return (StringMap<V>) EMPTY_MAP;
  }

  /**
   * @return an immutable empty map object. Always reuses the same object, does
   *         not create new ones.
   */
  @SuppressWarnings("unchecked")
  public static <K, V> IdentityMap<K, V> emptyIdentityMap() {
    // Safe: the shared instance never holds any entry, so K and V are unconstrained.
    return (IdentityMap<K, V>) EMPTY;
  }

  /**
   * Creates a new NumberMap using the default collection factory.
   */
  public static <V> NumberMap<V> createNumberMap() {
    return CollectionUtils.getCollectionFactory().createNumberMap();
  }

  /**
   * Creates a new IntMap using the default collection factory.
   */
  public static <V> IntMap<V> createIntMap() {
    return CollectionUtils.getCollectionFactory().createIntMap();
  }

  /**
   * Creates a new queue using the default collection factory.
   */
  public static <V> Queue<V> createQueue() {
    return CollectionUtils.getCollectionFactory().createQueue();
  }

  /**
   * Creates a new priority queue using the default collection factory.
   */
  public static NumberPriorityQueue createPriorityQueue() {
    return CollectionUtils.getCollectionFactory().createPriorityQueue();
  }

  /**
   * Creates a new IdentityMap using the default collection factory.
   */
  public static <K, V> IdentityMap<K, V> createIdentityMap() {
    return CollectionUtils.getCollectionFactory().createIdentityMap();
  }

  /**
   * Creates a new IdentitySet using the default collection factory.
   */
  public static <V> IdentitySet<V> createIdentitySet() {
    return CollectionUtils.getCollectionFactory().createIdentitySet();
  }
/**
* Creates a new, immutable, singleton IdentitySet.
*/
public static <V> ReadableIdentitySet<V> singleton(final V value) {
Preconditions.checkNotNull(value, "Can not create singleton of null");
return new ReadableIdentitySet<V>() {
@Override
public boolean contains(V s) {
// Note that == is used, not .equals(), because this is an identity set.
return value == s;
}
@Override
public int countEntries() {
return 1;
}
@Override
public void each(Proc<? super V> procedure) {
procedure.apply(value);
}
@Override
public V someElement() {
return value;
}
@Override
public boolean isEmpty() {
return false;
}
};
}
/**
* Creates a new StringSet using the default collection factory.
*/
public static StringSet createStringSet() {
return getCollectionFactory().createStringSet();
}
public static <V> StringMap<V> copyStringMap(ReadableStringMap<V> m) {
StringMap<V> copy = createStringMap();
copy.putAll(m);
return copy;
}
public static StringSet copyStringSet(ReadableStringSet s) {
StringSet copy = createStringSet();
copy.addAll(s);
return copy;
}
/**
* Adds all entries from the source map to the target map.
*
* @return the target map, for convenience
*/
public static <V, M extends Map<String, V>> M copyToJavaMap(ReadableStringMap<V> source,
final M target) {
source.each(new StringMap.ProcV<V>() {
@Override
public void apply(String key, V value) {
target.put(key, value);
}
});
return target;
}
/**
* Adds all entries from the source map to the target map. NOTE(patcoleman):
* please only call from assertions/testing code. Ideally everything should be
* ignorant of the java.util.Map implementations as the collection API here
* becomes more useful.
*
* @return java.util.Map version of our IdentityMap
*/
public static <K, V> Map<K, V> copyToJavaIdentityMapForTesting(IdentityMap<K, V> source) {
final Map<K, V> result = new IdentityHashMap<K, V>();
source.each(new IdentityMap.ProcV<K, V>() {
@Override
public void apply(K key, V value) {
result.put(key, value);
}
});
return result;
}
  /**
   * Creates a new java map with the same contents as the source StringMap.
   */
  public static <V> Map<String, V> newJavaMap(ReadableStringMap<V> source) {
    return copyToJavaMap(source, new HashMap<String, V>());
  }
/**
* Adds all elements from the source set to the target collection.
*
* @return the target collection, for convenience
*/
public static <C extends Collection<String>> C copyToJavaCollection(
ReadableStringSet source, final C target) {
source.each(new StringSet.Proc() {
@Override
public void apply(String element) {
target.add(element);
}
});
return target;
}
/**
* Adds all values from the source map to the target collection.
*
* @return the target collection, for convenience
*/
public static <T, C extends Collection<T>> C copyValuesToJavaCollection(
ReadableStringMap<T> source, final C target) {
source.each(new StringMap.ProcV<T>() {
@Override
public void apply(String key, T value) {
target.add(value);
}
});
return target;
}
/**
* Creates a new java set with the same contents as the source StringSet.
*/
public static Set<String> newJavaSet(ReadableStringSet source) {
return copyToJavaCollection(source, new HashSet<String>());
}
/**
* Creates a new java list with the same contents as the source StringSet.
*/
public static List<String> newJavaList(ReadableStringSet source) {
return copyToJavaCollection(source, new ArrayList<String>());
}
/**
* Creates a new java list with the same contents as the values of the source
* StringMap.
*/
public static <T> List<T> newJavaList(ReadableStringMap<T> source) {
return copyValuesToJavaCollection(source, new ArrayList<T>());
}
  /**
   * Returns a StringMap view of the specified map.
   *
   * <p>NOTE(review): presumably a live view backed by {@code a} (adapter pattern),
   * not a copy — confirm against StringMapAdapter.
   */
  public static <V> StringMap<V> adaptStringMap(Map<String, V> a) {
    return new StringMapAdapter<V>(a);
  }
  /**
   * Returns a NumberMap view of the specified map.
   */
  public static <V> NumberMap<V> adaptNumberMap(Map<Double, V> a) {
    return new NumberMapAdapter<V>(a);
  }
  /**
   * Returns an IntMap view of the specified map.
   */
  public static <V> IntMap<V> adaptIntMap(Map<Integer, V> a) {
    return new IntMapAdapter<V>(a);
  }
  /**
   * Returns a StringSet view of the specified set.
   *
   * <p>NOTE(review): presumably a live view backed by {@code a} (adapter pattern),
   * not a copy — confirm against StringSetAdapter.
   */
  public static StringSet adaptStringSet(Set<String> a) {
    return new StringSetAdapter(a);
  }
  /**
   * Returns a NumberPriorityQueue adaptor of a regular java.util.PriorityQueue.
   *
   * @param priorityQueue the backing queue the adapter wraps
   */
  public static NumberPriorityQueue adaptNumberPriorityQueue(PriorityQueue<Double> priorityQueue) {
    return new NumberPriorityQueueAdapter(priorityQueue);
  }
/**
* Returns a StringSet copy of the specified set.
*/
public static StringSet newStringSet(Set<String> a) {
StringSet s = createStringSet();
for (String value : a) {
s.add(value);
}
return s;
}
/**
* Returns a StringSet consisting of the specified values, removing duplicates
*/
public static StringSet newStringSet(String... values) {
StringSet s = createStringSet();
for (String value : values) {
s.add(value);
}
return s;
}
/**
* Returns a StringMap consisting of the specified key-value pairs
*/
public static StringMap<String> newStringMap(String... pairs) {
Preconditions.checkArgument(pairs.length % 2 == 0, "newStringMap: pairs must have even length");
StringMap<String> m = createStringMap();
for (int i = 0; i < pairs.length; i += 2) {
m.put(pairs[i], pairs[i + 1]);
}
return m;
}
/**
* Returns a list containing all the values of the given string map. The result
* will be a copy not backed by the map so it is safe to modify the map while
* concurrently iterating the list.
*/
public static <T> List<T> valueList(ReadableStringMap<T> map) {
final List<T> result = newArrayList();
map.each(new ProcV<T>() {
public void apply(String key, T value) {
result.add(value);
}
});
return result;
}
/**
* Joins an array of strings with the given separator
*/
public static String join(char separator, String first, String... rest) {
StringBuilder ret = new StringBuilder(first);
for (int i = 0; i < rest.length; i++) {
ret.append(separator);
ret.append(rest[i]);
}
return ret.toString();
}
/**
* Joins an array of strings with the given separator
*/
public static String join(char separator, String... parts) {
StringBuilder ret = new StringBuilder();
if (parts.length > 0) {
ret.append(parts[0]);
}
for (int i = 1; i < parts.length; i++) {
ret.append(separator);
ret.append(parts[i]);
}
return ret.toString();
}
/**
* Joins an array of strings.
*/
public static String join(String... parts) {
StringBuilder ret = new StringBuilder();
if (parts.length > 0) {
ret.append(parts[0]);
}
for (int i = 1; i < parts.length; i++) {
ret.append(parts[i]);
}
return ret.toString();
}
public static String repeat(char component, int repeat) {
Preconditions.checkArgument(repeat >= 0, "Cannot have negative repeat");
char[] chars = new char[repeat];
Arrays.fill(chars, component);
return String.valueOf(chars);
}
}
// ==== concatenation artifact: the following entry is
// googleapis/google-cloud-java: java-telcoautomation/proto-google-cloud-telcoautomation-v1alpha1/
//   src/main/java/com/google/cloud/telcoautomation/v1alpha1/ListDeploymentsResponse.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/telcoautomation/v1alpha1/telcoautomation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.telcoautomation.v1alpha1;
/**
*
*
* <pre>
* Response object for `ListDeployments`.
* </pre>
*
* Protobuf type {@code google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse}
*/
public final class ListDeploymentsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse)
ListDeploymentsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDeploymentsResponse.newBuilder() to construct.
private ListDeploymentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListDeploymentsResponse() {
deployments_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListDeploymentsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1alpha1_ListDeploymentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1alpha1_ListDeploymentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse.class,
com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse.Builder.class);
}
public static final int DEPLOYMENTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.telcoautomation.v1alpha1.Deployment> deployments_;
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.telcoautomation.v1alpha1.Deployment> getDeploymentsList() {
return deployments_;
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.telcoautomation.v1alpha1.DeploymentOrBuilder>
getDeploymentsOrBuilderList() {
return deployments_;
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
@java.lang.Override
public int getDeploymentsCount() {
return deployments_.size();
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
@java.lang.Override
public com.google.cloud.telcoautomation.v1alpha1.Deployment getDeployments(int index) {
return deployments_.get(index);
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
@java.lang.Override
public com.google.cloud.telcoautomation.v1alpha1.DeploymentOrBuilder getDeploymentsOrBuilder(
int index) {
return deployments_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token that can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // The field still holds the ByteString produced by the parser; decode it once
      // and cache the String so later calls skip the UTF-8 decode.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // memoizedIsInitialized sentinel: -1 = not computed yet, 0 = false, 1 = true.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message declares no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < deployments_.size(); i++) {
output.writeMessage(1, deployments_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < deployments_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, deployments_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse)) {
return super.equals(obj);
}
com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse other =
(com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse) obj;
if (!getDeploymentsList().equals(other.getDeploymentsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    // Generated messages are immutable, so the hash is computed once and memoized;
    // 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Repeated field only contributes when non-empty, mirroring equals().
    if (getDeploymentsCount() > 0) {
      hash = (37 * hash) + DEPLOYMENTS_FIELD_NUMBER;
      hash = (53 * hash) + getDeploymentsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response object for `ListDeployments`.
* </pre>
*
* Protobuf type {@code google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse)
com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1alpha1_ListDeploymentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1alpha1_ListDeploymentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse.class,
com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse.Builder.class);
}
// Construct using
// com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (deploymentsBuilder_ == null) {
deployments_ = java.util.Collections.emptyList();
} else {
deployments_ = null;
deploymentsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.telcoautomation.v1alpha1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1alpha1_ListDeploymentsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse
getDefaultInstanceForType() {
return com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse build() {
com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse buildPartial() {
com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse result =
new com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse result) {
if (deploymentsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
deployments_ = java.util.Collections.unmodifiableList(deployments_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.deployments_ = deployments_;
} else {
result.deployments_ = deploymentsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse) {
return mergeFrom((com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse other) {
if (other
== com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse.getDefaultInstance())
return this;
if (deploymentsBuilder_ == null) {
if (!other.deployments_.isEmpty()) {
if (deployments_.isEmpty()) {
deployments_ = other.deployments_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDeploymentsIsMutable();
deployments_.addAll(other.deployments_);
}
onChanged();
}
} else {
if (!other.deployments_.isEmpty()) {
if (deploymentsBuilder_.isEmpty()) {
deploymentsBuilder_.dispose();
deploymentsBuilder_ = null;
deployments_ = other.deployments_;
bitField0_ = (bitField0_ & ~0x00000001);
deploymentsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getDeploymentsFieldBuilder()
: null;
} else {
deploymentsBuilder_.addAllMessages(other.deployments_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.telcoautomation.v1alpha1.Deployment m =
input.readMessage(
com.google.cloud.telcoautomation.v1alpha1.Deployment.parser(),
extensionRegistry);
if (deploymentsBuilder_ == null) {
ensureDeploymentsIsMutable();
deployments_.add(m);
} else {
deploymentsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.telcoautomation.v1alpha1.Deployment> deployments_ =
java.util.Collections.emptyList();
private void ensureDeploymentsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
deployments_ =
new java.util.ArrayList<com.google.cloud.telcoautomation.v1alpha1.Deployment>(
deployments_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.telcoautomation.v1alpha1.Deployment,
com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder,
com.google.cloud.telcoautomation.v1alpha1.DeploymentOrBuilder>
deploymentsBuilder_;
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public java.util.List<com.google.cloud.telcoautomation.v1alpha1.Deployment>
getDeploymentsList() {
if (deploymentsBuilder_ == null) {
return java.util.Collections.unmodifiableList(deployments_);
} else {
return deploymentsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public int getDeploymentsCount() {
if (deploymentsBuilder_ == null) {
return deployments_.size();
} else {
return deploymentsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public com.google.cloud.telcoautomation.v1alpha1.Deployment getDeployments(int index) {
if (deploymentsBuilder_ == null) {
return deployments_.get(index);
} else {
return deploymentsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public Builder setDeployments(
int index, com.google.cloud.telcoautomation.v1alpha1.Deployment value) {
if (deploymentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDeploymentsIsMutable();
deployments_.set(index, value);
onChanged();
} else {
deploymentsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public Builder setDeployments(
int index, com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder builderForValue) {
if (deploymentsBuilder_ == null) {
ensureDeploymentsIsMutable();
deployments_.set(index, builderForValue.build());
onChanged();
} else {
deploymentsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public Builder addDeployments(com.google.cloud.telcoautomation.v1alpha1.Deployment value) {
if (deploymentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDeploymentsIsMutable();
deployments_.add(value);
onChanged();
} else {
deploymentsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public Builder addDeployments(
int index, com.google.cloud.telcoautomation.v1alpha1.Deployment value) {
if (deploymentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDeploymentsIsMutable();
deployments_.add(index, value);
onChanged();
} else {
deploymentsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public Builder addDeployments(
com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder builderForValue) {
if (deploymentsBuilder_ == null) {
ensureDeploymentsIsMutable();
deployments_.add(builderForValue.build());
onChanged();
} else {
deploymentsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public Builder addDeployments(
int index, com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder builderForValue) {
if (deploymentsBuilder_ == null) {
ensureDeploymentsIsMutable();
deployments_.add(index, builderForValue.build());
onChanged();
} else {
deploymentsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public Builder addAllDeployments(
java.lang.Iterable<? extends com.google.cloud.telcoautomation.v1alpha1.Deployment> values) {
if (deploymentsBuilder_ == null) {
ensureDeploymentsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, deployments_);
onChanged();
} else {
deploymentsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public Builder clearDeployments() {
if (deploymentsBuilder_ == null) {
deployments_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
deploymentsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public Builder removeDeployments(int index) {
if (deploymentsBuilder_ == null) {
ensureDeploymentsIsMutable();
deployments_.remove(index);
onChanged();
} else {
deploymentsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder getDeploymentsBuilder(
int index) {
return getDeploymentsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public com.google.cloud.telcoautomation.v1alpha1.DeploymentOrBuilder getDeploymentsOrBuilder(
int index) {
if (deploymentsBuilder_ == null) {
return deployments_.get(index);
} else {
return deploymentsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public java.util.List<? extends com.google.cloud.telcoautomation.v1alpha1.DeploymentOrBuilder>
getDeploymentsOrBuilderList() {
if (deploymentsBuilder_ != null) {
return deploymentsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(deployments_);
}
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder addDeploymentsBuilder() {
return getDeploymentsFieldBuilder()
.addBuilder(com.google.cloud.telcoautomation.v1alpha1.Deployment.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder addDeploymentsBuilder(
int index) {
return getDeploymentsFieldBuilder()
.addBuilder(
index, com.google.cloud.telcoautomation.v1alpha1.Deployment.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of requested deployments.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1alpha1.Deployment deployments = 1;</code>
*/
public java.util.List<com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder>
getDeploymentsBuilderList() {
return getDeploymentsFieldBuilder().getBuilderList();
}
    /**
     * Lazily creates the {@code RepeatedFieldBuilderV3} for the {@code deployments} field.
     *
     * <p>On first call the current {@code deployments_} list is handed off to the field builder
     * (bit 0x00000001 of {@code bitField0_} tells the builder whether that list is already
     * mutable) and the plain list reference is nulled out; from then on all repeated-field
     * access goes through the builder.
     */
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.telcoautomation.v1alpha1.Deployment,
            com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder,
            com.google.cloud.telcoautomation.v1alpha1.DeploymentOrBuilder>
        getDeploymentsFieldBuilder() {
      if (deploymentsBuilder_ == null) {
        deploymentsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.telcoautomation.v1alpha1.Deployment,
                com.google.cloud.telcoautomation.v1alpha1.Deployment.Builder,
                com.google.cloud.telcoautomation.v1alpha1.DeploymentOrBuilder>(
                deployments_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        deployments_ = null;
      }
      return deploymentsBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
    // Plain delegations to the generated superclass; declared final so subclasses of
    // this generated Builder cannot alter unknown-field handling.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse)
  // Shared immutable default (all-fields-unset) instance of ListDeploymentsResponse.
  private static final com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse();
  }

  /** Returns the shared immutable default instance. */
  public static com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that delegates to Builder.mergeFrom and, on any failure, attaches the
  // partially-built message via setUnfinishedMessage so callers can inspect it.
  private static final com.google.protobuf.Parser<ListDeploymentsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListDeploymentsResponse>() {
        @java.lang.Override
        public ListDeploymentsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers always see a protobuf parse exception.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns the type-specific parser for this message. */
  public static com.google.protobuf.Parser<ListDeploymentsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListDeploymentsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.telcoautomation.v1alpha1.ListDeploymentsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ===== File boundary (concatenation artifact): the following content belongs to
// java-analytics-data/proto-google-analytics-data-v1alpha/src/main/java/com/google/analytics/data/v1alpha/ListReportTasksResponse.java
// (repo: googleapis/google-cloud-java) =====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/data/v1alpha/analytics_data_api.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.data.v1alpha;
/**
*
*
* <pre>
* A list of all report tasks for a property.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1alpha.ListReportTasksResponse}
*/
public final class ListReportTasksResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.data.v1alpha.ListReportTasksResponse)
ListReportTasksResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListReportTasksResponse.newBuilder() to construct.
  private ListReportTasksResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor: initializes an empty repeated field and an empty page token.
  private ListReportTasksResponse() {
    reportTasks_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  // Reflective instantiation hook used by the protobuf runtime.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListReportTasksResponse();
  }
  /** Returns the proto descriptor for {@code ListReportTasksResponse}. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.data.v1alpha.AnalyticsDataApiProto
        .internal_static_google_analytics_data_v1alpha_ListReportTasksResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.data.v1alpha.AnalyticsDataApiProto
        .internal_static_google_analytics_data_v1alpha_ListReportTasksResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.data.v1alpha.ListReportTasksResponse.class,
            com.google.analytics.data.v1alpha.ListReportTasksResponse.Builder.class);
  }

  // Bit 0x00000001 records presence of the optional next_page_token field.
  private int bitField0_;

  public static final int REPORT_TASKS_FIELD_NUMBER = 1;

  // Repeated report_tasks field; immutable once the message is built.
  @SuppressWarnings("serial")
  private java.util.List<com.google.analytics.data.v1alpha.ReportTask> reportTasks_;
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.analytics.data.v1alpha.ReportTask> getReportTasksList() {
return reportTasks_;
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.analytics.data.v1alpha.ReportTaskOrBuilder>
getReportTasksOrBuilderList() {
return reportTasks_;
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
@java.lang.Override
public int getReportTasksCount() {
return reportTasks_.size();
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
@java.lang.Override
public com.google.analytics.data.v1alpha.ReportTask getReportTasks(int index) {
return reportTasks_.get(index);
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
@java.lang.Override
public com.google.analytics.data.v1alpha.ReportTaskOrBuilder getReportTasksOrBuilder(int index) {
return reportTasks_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>optional string next_page_token = 2;</code>
*
* @return Whether the nextPageToken field is set.
*/
@java.lang.Override
public boolean hasNextPageToken() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>optional string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>optional string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
  // This message has no required fields, so the check always resolves to true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes report_tasks (field 1) then, only if its presence bit is set,
  // next_page_token (field 2), followed by any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < reportTasks_.size(); i++) {
      output.writeMessage(1, reportTasks_.get(i));
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the serialized wire size; memoizedSize == -1 means
  // "not yet computed" (inherited sentinel from GeneratedMessageV3).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < reportTasks_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, reportTasks_.get(i));
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality: report_tasks list, optional next_page_token (presence
  // must match before values are compared), and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.data.v1alpha.ListReportTasksResponse)) {
      return super.equals(obj);
    }
    com.google.analytics.data.v1alpha.ListReportTasksResponse other =
        (com.google.analytics.data.v1alpha.ListReportTasksResponse) obj;

    if (!getReportTasksList().equals(other.getReportTasksList())) return false;
    if (hasNextPageToken() != other.hasNextPageToken()) return false;
    if (hasNextPageToken()) {
      if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals(); only set/non-empty fields are folded in,
  // keyed by their field numbers. memoizedHashCode == 0 means "not yet computed".
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getReportTasksCount() > 0) {
      hash = (37 * hash) + REPORT_TASKS_FIELD_NUMBER;
      hash = (53 * hash) + getReportTasksList().hashCode();
    }
    if (hasNextPageToken()) {
      hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
      hash = (53 * hash) + getNextPageToken().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.analytics.data.v1alpha.ListReportTasksResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.data.v1alpha.ListReportTasksResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.ListReportTasksResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.data.v1alpha.ListReportTasksResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.ListReportTasksResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.data.v1alpha.ListReportTasksResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.ListReportTasksResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.data.v1alpha.ListReportTasksResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.ListReportTasksResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.analytics.data.v1alpha.ListReportTasksResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.data.v1alpha.ListReportTasksResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.data.v1alpha.ListReportTasksResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Returns a new builder with all fields at their defaults. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a new builder pre-populated from {@code prototype}. */
  public static Builder newBuilder(
      com.google.analytics.data.v1alpha.ListReportTasksResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance maps to a fresh empty Builder; any other instance is merged in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* A list of all report tasks for a property.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1alpha.ListReportTasksResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.data.v1alpha.ListReportTasksResponse)
com.google.analytics.data.v1alpha.ListReportTasksResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.data.v1alpha.AnalyticsDataApiProto
.internal_static_google_analytics_data_v1alpha_ListReportTasksResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.data.v1alpha.AnalyticsDataApiProto
.internal_static_google_analytics_data_v1alpha_ListReportTasksResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.data.v1alpha.ListReportTasksResponse.class,
com.google.analytics.data.v1alpha.ListReportTasksResponse.Builder.class);
}
// Construct using com.google.analytics.data.v1alpha.ListReportTasksResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    // Resets every field to its default. In this Builder, bit 0x00000001 of bitField0_
    // tracks whether reportTasks_ is privately mutable and bit 0x00000002 tracks
    // next_page_token presence; both are cleared here.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (reportTasksBuilder_ == null) {
        reportTasks_ = java.util.Collections.emptyList();
      } else {
        reportTasks_ = null;
        reportTasksBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.data.v1alpha.AnalyticsDataApiProto
.internal_static_google_analytics_data_v1alpha_ListReportTasksResponse_descriptor;
}
@java.lang.Override
public com.google.analytics.data.v1alpha.ListReportTasksResponse getDefaultInstanceForType() {
return com.google.analytics.data.v1alpha.ListReportTasksResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.analytics.data.v1alpha.ListReportTasksResponse build() {
com.google.analytics.data.v1alpha.ListReportTasksResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    @java.lang.Override
    public com.google.analytics.data.v1alpha.ListReportTasksResponse buildPartial() {
      com.google.analytics.data.v1alpha.ListReportTasksResponse result =
          new com.google.analytics.data.v1alpha.ListReportTasksResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers report_tasks into the result: freezes the plain list (clearing the
    // mutable bit) when no field builder is active, otherwise builds from the builder.
    private void buildPartialRepeatedFields(
        com.google.analytics.data.v1alpha.ListReportTasksResponse result) {
      if (reportTasksBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          reportTasks_ = java.util.Collections.unmodifiableList(reportTasks_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.reportTasks_ = reportTasks_;
      } else {
        result.reportTasks_ = reportTasksBuilder_.build();
      }
    }

    // Copies next_page_token into the result, translating the Builder's presence bit
    // (0x00000002) into the message's presence bit (0x00000001).
    private void buildPartial0(com.google.analytics.data.v1alpha.ListReportTasksResponse result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.data.v1alpha.ListReportTasksResponse) {
return mergeFrom((com.google.analytics.data.v1alpha.ListReportTasksResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges another ListReportTasksResponse into this builder: report_tasks lists are
    // concatenated, and next_page_token is overwritten only when set on `other`.
    public Builder mergeFrom(com.google.analytics.data.v1alpha.ListReportTasksResponse other) {
      if (other == com.google.analytics.data.v1alpha.ListReportTasksResponse.getDefaultInstance())
        return this;
      if (reportTasksBuilder_ == null) {
        if (!other.reportTasks_.isEmpty()) {
          if (reportTasks_.isEmpty()) {
            // Adopt the other message's (immutable) list directly and clear the
            // mutable bit; copy-on-write happens later if a mutation is attempted.
            reportTasks_ = other.reportTasks_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureReportTasksIsMutable();
            reportTasks_.addAll(other.reportTasks_);
          }
          onChanged();
        }
      } else {
        if (!other.reportTasks_.isEmpty()) {
          if (reportTasksBuilder_.isEmpty()) {
            // Discard the empty field builder and adopt the list; the builder is
            // recreated lazily (or eagerly when alwaysUseFieldBuilders is set).
            reportTasksBuilder_.dispose();
            reportTasksBuilder_ = null;
            reportTasks_ = other.reportTasks_;
            bitField0_ = (bitField0_ & ~0x00000001);
            reportTasksBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getReportTasksFieldBuilder()
                    : null;
          } else {
            reportTasksBuilder_.addAllMessages(other.reportTasks_);
          }
        }
      }
      if (other.hasNextPageToken()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Streaming parse loop: tag 10 (field 1, wire type 2) = a report_tasks message,
    // tag 18 (field 2, wire type 2) = the next_page_token string; anything else is
    // routed to unknown-field handling. onChanged() always fires via finally.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.analytics.data.v1alpha.ReportTask m =
                    input.readMessage(
                        com.google.analytics.data.v1alpha.ReportTask.parser(), extensionRegistry);
                if (reportTasksBuilder_ == null) {
                  ensureReportTasksIsMutable();
                  reportTasks_.add(m);
                } else {
                  reportTasksBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    // Backing list for report_tasks while no field builder is active.
    private java.util.List<com.google.analytics.data.v1alpha.ReportTask> reportTasks_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: the first mutation replaces the (possibly shared, immutable)
    // list with a private ArrayList and sets bit 0x00000001 to mark it mutable.
    private void ensureReportTasksIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        reportTasks_ =
            new java.util.ArrayList<com.google.analytics.data.v1alpha.ReportTask>(reportTasks_);
        bitField0_ |= 0x00000001;
      }
    }

    // Non-null while accessors operate in "builder mode"; null while the plain list is used.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.analytics.data.v1alpha.ReportTask,
            com.google.analytics.data.v1alpha.ReportTask.Builder,
            com.google.analytics.data.v1alpha.ReportTaskOrBuilder>
        reportTasksBuilder_;
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public java.util.List<com.google.analytics.data.v1alpha.ReportTask> getReportTasksList() {
if (reportTasksBuilder_ == null) {
return java.util.Collections.unmodifiableList(reportTasks_);
} else {
return reportTasksBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public int getReportTasksCount() {
if (reportTasksBuilder_ == null) {
return reportTasks_.size();
} else {
return reportTasksBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public com.google.analytics.data.v1alpha.ReportTask getReportTasks(int index) {
if (reportTasksBuilder_ == null) {
return reportTasks_.get(index);
} else {
return reportTasksBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public Builder setReportTasks(int index, com.google.analytics.data.v1alpha.ReportTask value) {
if (reportTasksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureReportTasksIsMutable();
reportTasks_.set(index, value);
onChanged();
} else {
reportTasksBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public Builder setReportTasks(
int index, com.google.analytics.data.v1alpha.ReportTask.Builder builderForValue) {
if (reportTasksBuilder_ == null) {
ensureReportTasksIsMutable();
reportTasks_.set(index, builderForValue.build());
onChanged();
} else {
reportTasksBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public Builder addReportTasks(com.google.analytics.data.v1alpha.ReportTask value) {
if (reportTasksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureReportTasksIsMutable();
reportTasks_.add(value);
onChanged();
} else {
reportTasksBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public Builder addReportTasks(int index, com.google.analytics.data.v1alpha.ReportTask value) {
if (reportTasksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureReportTasksIsMutable();
reportTasks_.add(index, value);
onChanged();
} else {
reportTasksBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public Builder addReportTasks(
com.google.analytics.data.v1alpha.ReportTask.Builder builderForValue) {
if (reportTasksBuilder_ == null) {
ensureReportTasksIsMutable();
reportTasks_.add(builderForValue.build());
onChanged();
} else {
reportTasksBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public Builder addReportTasks(
int index, com.google.analytics.data.v1alpha.ReportTask.Builder builderForValue) {
if (reportTasksBuilder_ == null) {
ensureReportTasksIsMutable();
reportTasks_.add(index, builderForValue.build());
onChanged();
} else {
reportTasksBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public Builder addAllReportTasks(
java.lang.Iterable<? extends com.google.analytics.data.v1alpha.ReportTask> values) {
if (reportTasksBuilder_ == null) {
ensureReportTasksIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, reportTasks_);
onChanged();
} else {
reportTasksBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public Builder clearReportTasks() {
if (reportTasksBuilder_ == null) {
reportTasks_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
reportTasksBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public Builder removeReportTasks(int index) {
if (reportTasksBuilder_ == null) {
ensureReportTasksIsMutable();
reportTasks_.remove(index);
onChanged();
} else {
reportTasksBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public com.google.analytics.data.v1alpha.ReportTask.Builder getReportTasksBuilder(int index) {
return getReportTasksFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public com.google.analytics.data.v1alpha.ReportTaskOrBuilder getReportTasksOrBuilder(
int index) {
if (reportTasksBuilder_ == null) {
return reportTasks_.get(index);
} else {
return reportTasksBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public java.util.List<? extends com.google.analytics.data.v1alpha.ReportTaskOrBuilder>
getReportTasksOrBuilderList() {
if (reportTasksBuilder_ != null) {
return reportTasksBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(reportTasks_);
}
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public com.google.analytics.data.v1alpha.ReportTask.Builder addReportTasksBuilder() {
return getReportTasksFieldBuilder()
.addBuilder(com.google.analytics.data.v1alpha.ReportTask.getDefaultInstance());
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public com.google.analytics.data.v1alpha.ReportTask.Builder addReportTasksBuilder(int index) {
return getReportTasksFieldBuilder()
.addBuilder(index, com.google.analytics.data.v1alpha.ReportTask.getDefaultInstance());
}
/**
*
*
* <pre>
* Each report task for a property.
* </pre>
*
* <code>repeated .google.analytics.data.v1alpha.ReportTask report_tasks = 1;</code>
*/
public java.util.List<com.google.analytics.data.v1alpha.ReportTask.Builder>
getReportTasksBuilderList() {
return getReportTasksFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for the repeated report_tasks field.
// On first call, ownership of the element list migrates from the plain
// reportTasks_ list into the field builder (reportTasks_ is then nulled so all
// subsequent access goes through reportTasksBuilder_). The boolean argument
// tells the builder whether the list is currently mutable (bit 0 of bitField0_).
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.data.v1alpha.ReportTask,
com.google.analytics.data.v1alpha.ReportTask.Builder,
com.google.analytics.data.v1alpha.ReportTaskOrBuilder>
getReportTasksFieldBuilder() {
if (reportTasksBuilder_ == null) {
reportTasksBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.data.v1alpha.ReportTask,
com.google.analytics.data.v1alpha.ReportTask.Builder,
com.google.analytics.data.v1alpha.ReportTaskOrBuilder>(
reportTasks_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
// List is now owned by the builder; clear the direct reference.
reportTasks_ = null;
}
return reportTasksBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>optional string next_page_token = 2;</code>
*
* @return Whether the nextPageToken field is set.
*/
public boolean hasNextPageToken() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>optional string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
// Returns next_page_token as a String. The field is stored as either a String
// or a ByteString; if it is still the raw ByteString from parsing, it is
// decoded as UTF-8 once and the decoded String is cached back into the field.
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded form so later calls skip the UTF-8 conversion.
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>optional string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
// Returns next_page_token as a ByteString — the mirror of getNextPageToken():
// if the field currently holds a String it is encoded to UTF-8 bytes once and
// the ByteString is cached back into the field.
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// Cache the encoded form for subsequent byte-level access.
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>optional string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
// Sets next_page_token from a String. Rejects null (proto3 fields are
// null-hostile), records presence via bit 1 of bitField0_ (the field is
// declared `optional`), and notifies any parent builder of the change.
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>optional string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>optional string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
// Sets next_page_token from raw bytes. The bytes are validated as well-formed
// UTF-8 (proto3 string fields must be valid UTF-8) before being stored, and
// presence is recorded via bit 1 of bitField0_.
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.data.v1alpha.ListReportTasksResponse)
}
// @@protoc_insertion_point(class_scope:google.analytics.data.v1alpha.ListReportTasksResponse)
// Singleton default (all-fields-unset) instance, created eagerly at class load.
private static final com.google.analytics.data.v1alpha.ListReportTasksResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.analytics.data.v1alpha.ListReportTasksResponse();
}
// Accessor for the shared immutable default instance of this message type.
public static com.google.analytics.data.v1alpha.ListReportTasksResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared wire-format parser. parsePartialFrom delegates to Builder.mergeFrom
// and converts every failure mode into InvalidProtocolBufferException,
// attaching whatever was parsed so far as the "unfinished message" so callers
// can inspect partial data.
private static final com.google.protobuf.Parser<ListReportTasksResponse> PARSER =
new com.google.protobuf.AbstractParser<ListReportTasksResponse>() {
@java.lang.Override
public ListReportTasksResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Already the right exception type; just attach the partial message.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
// Missing required fields: surface as an invalid-protocol failure.
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O errors so the Parser contract's exception type holds.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the shared PARSER instance.
public static com.google.protobuf.Parser<ListReportTasksResponse> parser() {
return PARSER;
}
// Instance-level accessor required by the Message interface; returns the
// same shared PARSER.
@java.lang.Override
public com.google.protobuf.Parser<ListReportTasksResponse> getParserForType() {
return PARSER;
}
// Instance-level accessor required by the Message interface; returns the
// shared default instance.
@java.lang.Override
public com.google.analytics.data.v1alpha.ListReportTasksResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/knowledge_base.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2beta1;
/**
*
*
* <pre>
* Response message for
* [KnowledgeBases.ListKnowledgeBases][google.cloud.dialogflow.v2beta1.KnowledgeBases.ListKnowledgeBases].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse}
*/
public final class ListKnowledgeBasesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse)
ListKnowledgeBasesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListKnowledgeBasesResponse.newBuilder() to construct.
private ListKnowledgeBasesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListKnowledgeBasesResponse() {
knowledgeBases_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListKnowledgeBasesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.KnowledgeBaseProto
.internal_static_google_cloud_dialogflow_v2beta1_ListKnowledgeBasesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.KnowledgeBaseProto
.internal_static_google_cloud_dialogflow_v2beta1_ListKnowledgeBasesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse.class,
com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse.Builder.class);
}
public static final int KNOWLEDGE_BASES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.dialogflow.v2beta1.KnowledgeBase> knowledgeBases_;
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.dialogflow.v2beta1.KnowledgeBase> getKnowledgeBasesList() {
return knowledgeBases_;
}
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.dialogflow.v2beta1.KnowledgeBaseOrBuilder>
getKnowledgeBasesOrBuilderList() {
return knowledgeBases_;
}
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
@java.lang.Override
public int getKnowledgeBasesCount() {
return knowledgeBases_.size();
}
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.KnowledgeBase getKnowledgeBases(int index) {
return knowledgeBases_.get(index);
}
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.KnowledgeBaseOrBuilder getKnowledgeBasesOrBuilder(
int index) {
return knowledgeBases_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes this message to the protobuf wire format: each knowledge_bases
// element as field 1, next_page_token as field 2 only when non-empty (proto3
// default-value elision), then any unknown fields preserved from parsing.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < knowledgeBases_.size(); i++) {
output.writeMessage(1, knowledgeBases_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
// Computes the wire-format byte size of this message, mirroring writeTo()
// field-for-field. The result is memoized in memoizedSize (safe because the
// message is immutable); -1 means "not yet computed".
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < knowledgeBases_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, knowledgeBases_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over all fields: the knowledge_bases list, next_page_token,
// and the preserved unknown fields must all match. Non-message (or
// different-type) operands defer to the superclass implementation.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse other =
(com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse) obj;
if (!getKnowledgeBasesList().equals(other.getKnowledgeBasesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash over the same fields equals() compares, folding each field number and
// value into the accumulator. Memoized in memoizedHashCode (0 = not yet
// computed), which is safe because the message is immutable.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
// Repeated field only contributes when non-empty, matching equals().
if (getKnowledgeBasesCount() > 0) {
hash = (37 * hash) + KNOWLEDGE_BASES_FIELD_NUMBER;
hash = (53 * hash) + getKnowledgeBasesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [KnowledgeBases.ListKnowledgeBases][google.cloud.dialogflow.v2beta1.KnowledgeBases.ListKnowledgeBases].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse)
com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.KnowledgeBaseProto
.internal_static_google_cloud_dialogflow_v2beta1_ListKnowledgeBasesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.KnowledgeBaseProto
.internal_static_google_cloud_dialogflow_v2beta1_ListKnowledgeBasesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse.class,
com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (knowledgeBasesBuilder_ == null) {
knowledgeBases_ = java.util.Collections.emptyList();
} else {
knowledgeBases_ = null;
knowledgeBasesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2beta1.KnowledgeBaseProto
.internal_static_google_cloud_dialogflow_v2beta1_ListKnowledgeBasesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse
getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse build() {
com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds the message without enforcing required-field initialization.
// Repeated fields are transferred first, then scalar fields whose presence
// bits are set are copied by buildPartial0.
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse buildPartial() {
com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse result =
new com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers the knowledge_bases list into the built message. When backed by
// the plain list, it is frozen (wrapped unmodifiable) and its mutability bit
// cleared so the builder and message can share it safely; when backed by the
// field builder, the builder produces the immutable list.
private void buildPartialRepeatedFields(
com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse result) {
if (knowledgeBasesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
knowledgeBases_ = java.util.Collections.unmodifiableList(knowledgeBases_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.knowledgeBases_ = knowledgeBases_;
} else {
result.knowledgeBases_ = knowledgeBasesBuilder_.build();
}
}
// Copies scalar fields into the built message, gated on their presence bits
// (bit 1 = next_page_token was explicitly set on this builder).
private void buildPartial0(
com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse) {
return mergeFrom((com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another ListKnowledgeBasesResponse into this builder: appends its
// knowledge_bases elements, overwrites next_page_token when the other's is
// non-empty, and merges unknown fields. Merging the default instance is a
// no-op.
public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse other) {
if (other
== com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse.getDefaultInstance())
return this;
if (knowledgeBasesBuilder_ == null) {
// List-backed path: share the other's (immutable) list when ours is
// empty, otherwise copy-on-write append.
if (!other.knowledgeBases_.isEmpty()) {
if (knowledgeBases_.isEmpty()) {
knowledgeBases_ = other.knowledgeBases_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureKnowledgeBasesIsMutable();
knowledgeBases_.addAll(other.knowledgeBases_);
}
onChanged();
}
} else {
// Field-builder-backed path: if our builder is empty, drop it and adopt
// the other's list directly (recreating the builder only when the
// runtime always uses field builders); otherwise append via the builder.
if (!other.knowledgeBases_.isEmpty()) {
if (knowledgeBasesBuilder_.isEmpty()) {
knowledgeBasesBuilder_.dispose();
knowledgeBasesBuilder_ = null;
knowledgeBases_ = other.knowledgeBases_;
bitField0_ = (bitField0_ & ~0x00000001);
knowledgeBasesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getKnowledgeBasesFieldBuilder()
: null;
} else {
knowledgeBasesBuilder_.addAllMessages(other.knowledgeBases_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses wire-format data from the stream into this builder, dispatching on
// each field's tag: 10 = knowledge_bases (field 1, length-delimited message),
// 18 = next_page_token (field 2, string). Unrecognized tags are preserved as
// unknown fields; tag 0 (or an end-group tag) terminates the loop.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.dialogflow.v2beta1.KnowledgeBase m =
input.readMessage(
com.google.cloud.dialogflow.v2beta1.KnowledgeBase.parser(),
extensionRegistry);
if (knowledgeBasesBuilder_ == null) {
ensureKnowledgeBasesIsMutable();
knowledgeBases_.add(m);
} else {
knowledgeBasesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even when parsing aborts partway through.
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.dialogflow.v2beta1.KnowledgeBase> knowledgeBases_ =
java.util.Collections.emptyList();
// Copy-on-write guard: if bit 0 of bitField0_ says the current list is the
// shared/immutable one, replace it with a private ArrayList copy and mark it
// mutable before any in-place modification.
private void ensureKnowledgeBasesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
knowledgeBases_ =
new java.util.ArrayList<com.google.cloud.dialogflow.v2beta1.KnowledgeBase>(
knowledgeBases_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.v2beta1.KnowledgeBase,
com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder,
com.google.cloud.dialogflow.v2beta1.KnowledgeBaseOrBuilder>
knowledgeBasesBuilder_;
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
public java.util.List<com.google.cloud.dialogflow.v2beta1.KnowledgeBase>
getKnowledgeBasesList() {
if (knowledgeBasesBuilder_ == null) {
return java.util.Collections.unmodifiableList(knowledgeBases_);
} else {
return knowledgeBasesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
public int getKnowledgeBasesCount() {
if (knowledgeBasesBuilder_ == null) {
return knowledgeBases_.size();
} else {
return knowledgeBasesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
public com.google.cloud.dialogflow.v2beta1.KnowledgeBase getKnowledgeBases(int index) {
if (knowledgeBasesBuilder_ == null) {
return knowledgeBases_.get(index);
} else {
return knowledgeBasesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
public Builder setKnowledgeBases(
int index, com.google.cloud.dialogflow.v2beta1.KnowledgeBase value) {
if (knowledgeBasesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureKnowledgeBasesIsMutable();
knowledgeBases_.set(index, value);
onChanged();
} else {
knowledgeBasesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
public Builder setKnowledgeBases(
int index, com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder builderForValue) {
if (knowledgeBasesBuilder_ == null) {
ensureKnowledgeBasesIsMutable();
knowledgeBases_.set(index, builderForValue.build());
onChanged();
} else {
knowledgeBasesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
public Builder addKnowledgeBases(com.google.cloud.dialogflow.v2beta1.KnowledgeBase value) {
if (knowledgeBasesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureKnowledgeBasesIsMutable();
knowledgeBases_.add(value);
onChanged();
} else {
knowledgeBasesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
public Builder addKnowledgeBases(
int index, com.google.cloud.dialogflow.v2beta1.KnowledgeBase value) {
if (knowledgeBasesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureKnowledgeBasesIsMutable();
knowledgeBases_.add(index, value);
onChanged();
} else {
knowledgeBasesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
public Builder addKnowledgeBases(
com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder builderForValue) {
if (knowledgeBasesBuilder_ == null) {
ensureKnowledgeBasesIsMutable();
knowledgeBases_.add(builderForValue.build());
onChanged();
} else {
knowledgeBasesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of knowledge bases.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
*/
public Builder addKnowledgeBases(
int index, com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder builderForValue) {
if (knowledgeBasesBuilder_ == null) {
ensureKnowledgeBasesIsMutable();
knowledgeBases_.add(index, builderForValue.build());
onChanged();
} else {
knowledgeBasesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
    /**
     *
     *
     * <pre>
     * The list of knowledge bases.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
     */
    public Builder addAllKnowledgeBases(
        java.lang.Iterable<? extends com.google.cloud.dialogflow.v2beta1.KnowledgeBase> values) {
      // Bulk append; AbstractMessageLite.Builder.addAll performs per-element null checks.
      if (knowledgeBasesBuilder_ == null) {
        ensureKnowledgeBasesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, knowledgeBases_);
        onChanged();
      } else {
        knowledgeBasesBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of knowledge bases.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
     */
    public Builder clearKnowledgeBases() {
      // Resets the list and clears bit 0 of bitField0_, which marks the
      // repeated field as owned/modified by this builder.
      if (knowledgeBasesBuilder_ == null) {
        knowledgeBases_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        knowledgeBasesBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of knowledge bases.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
     */
    public Builder removeKnowledgeBases(int index) {
      // Removes the element at the given position (IndexOutOfBoundsException if invalid).
      if (knowledgeBasesBuilder_ == null) {
        ensureKnowledgeBasesIsMutable();
        knowledgeBases_.remove(index);
        onChanged();
      } else {
        knowledgeBasesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of knowledge bases.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
     */
    public com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder getKnowledgeBasesBuilder(
        int index) {
      // Forces creation of the field builder so the element can be mutated in place.
      return getKnowledgeBasesFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The list of knowledge bases.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
     */
    public com.google.cloud.dialogflow.v2beta1.KnowledgeBaseOrBuilder getKnowledgeBasesOrBuilder(
        int index) {
      // Read-only access; avoids creating the field builder when it does not exist yet.
      if (knowledgeBasesBuilder_ == null) {
        return knowledgeBases_.get(index);
      } else {
        return knowledgeBasesBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of knowledge bases.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.dialogflow.v2beta1.KnowledgeBaseOrBuilder>
        getKnowledgeBasesOrBuilderList() {
      // Returns an unmodifiable view so callers cannot bypass the builder's change tracking.
      if (knowledgeBasesBuilder_ != null) {
        return knowledgeBasesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(knowledgeBases_);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of knowledge bases.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
     */
    public com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder addKnowledgeBasesBuilder() {
      // Appends a default-valued element and returns its builder for in-place population.
      return getKnowledgeBasesFieldBuilder()
          .addBuilder(com.google.cloud.dialogflow.v2beta1.KnowledgeBase.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of knowledge bases.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
     */
    public com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder addKnowledgeBasesBuilder(
        int index) {
      // Inserts a default-valued element at the given index and returns its builder.
      return getKnowledgeBasesFieldBuilder()
          .addBuilder(
              index, com.google.cloud.dialogflow.v2beta1.KnowledgeBase.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of knowledge bases.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_bases = 1;</code>
     */
    public java.util.List<com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder>
        getKnowledgeBasesBuilderList() {
      return getKnowledgeBasesFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3 on first builder-style access.
    // Once created, it takes ownership of the current list (knowledgeBases_ is
    // nulled out) and all subsequent mutations go through it.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dialogflow.v2beta1.KnowledgeBase,
            com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder,
            com.google.cloud.dialogflow.v2beta1.KnowledgeBaseOrBuilder>
        getKnowledgeBasesFieldBuilder() {
      if (knowledgeBasesBuilder_ == null) {
        knowledgeBasesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.dialogflow.v2beta1.KnowledgeBase,
                com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder,
                com.google.cloud.dialogflow.v2beta1.KnowledgeBaseOrBuilder>(
                knowledgeBases_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        knowledgeBases_ = null;
      }
      return knowledgeBasesBuilder_;
    }
    // Holds either a java.lang.String or a ByteString; lazily decoded/encoded
    // and cached in place by the accessors below.
    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * more results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String back into the field for subsequent calls.
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * more results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the UTF-8 encoding back into the field for subsequent calls.
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * more results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Bit 1 of bitField0_ marks next_page_token as explicitly set.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * more results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      // Restore the default ("") taken from the default instance and clear the set-bit.
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * more results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject invalid byte sequences up front.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Unknown-field handling is delegated unchanged to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse)
  // Singleton default instance, created once at class-initialization time.
  private static final com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse();
  }

  public static com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: parses via a fresh Builder and, on any failure, attaches
  // the partially parsed message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ListKnowledgeBasesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListKnowledgeBasesResponse>() {
        @java.lang.Override
        public ListKnowledgeBasesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap raw I/O errors so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListKnowledgeBasesResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListKnowledgeBasesResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 38,410 | java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/InterconnectGroupsStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import static com.google.cloud.compute.v1.InterconnectGroupsClient.ListPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.httpjson.ProtoOperationTransformers;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.CreateMembersInterconnectGroupRequest;
import com.google.cloud.compute.v1.DeleteInterconnectGroupRequest;
import com.google.cloud.compute.v1.GetIamPolicyInterconnectGroupRequest;
import com.google.cloud.compute.v1.GetInterconnectGroupRequest;
import com.google.cloud.compute.v1.GetOperationalStatusInterconnectGroupRequest;
import com.google.cloud.compute.v1.InsertInterconnectGroupRequest;
import com.google.cloud.compute.v1.InterconnectGroup;
import com.google.cloud.compute.v1.InterconnectGroupsGetOperationalStatusResponse;
import com.google.cloud.compute.v1.InterconnectGroupsListResponse;
import com.google.cloud.compute.v1.ListInterconnectGroupsRequest;
import com.google.cloud.compute.v1.Operation;
import com.google.cloud.compute.v1.PatchInterconnectGroupRequest;
import com.google.cloud.compute.v1.Policy;
import com.google.cloud.compute.v1.SetIamPolicyInterconnectGroupRequest;
import com.google.cloud.compute.v1.TestIamPermissionsInterconnectGroupRequest;
import com.google.cloud.compute.v1.TestPermissionsResponse;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link InterconnectGroupsStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (compute.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of get:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* InterconnectGroupsStubSettings.Builder interconnectGroupsSettingsBuilder =
* InterconnectGroupsStubSettings.newBuilder();
* interconnectGroupsSettingsBuilder
* .getSettings()
* .setRetrySettings(
* interconnectGroupsSettingsBuilder
* .getSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* InterconnectGroupsStubSettings interconnectGroupsSettings =
* interconnectGroupsSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for createMembers:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* InterconnectGroupsStubSettings.Builder interconnectGroupsSettingsBuilder =
* InterconnectGroupsStubSettings.newBuilder();
* TimedRetryAlgorithm timedRetryAlgorithm =
* OperationalTimedPollAlgorithm.create(
* RetrySettings.newBuilder()
* .setInitialRetryDelayDuration(Duration.ofMillis(500))
* .setRetryDelayMultiplier(1.5)
* .setMaxRetryDelayDuration(Duration.ofMillis(5000))
* .setTotalTimeoutDuration(Duration.ofHours(24))
* .build());
* interconnectGroupsSettingsBuilder
* .createClusterOperationSettings()
* .setPollingAlgorithm(timedRetryAlgorithm)
* .build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class InterconnectGroupsStubSettings extends StubSettings<InterconnectGroupsStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/compute")
          .add("https://www.googleapis.com/auth/cloud-platform")
          .build();

  // Immutable per-RPC settings, one field per method (plus an OperationCallSettings
  // for each long-running method). Populated from the Builder in the constructor below.
  private final UnaryCallSettings<CreateMembersInterconnectGroupRequest, Operation>
      createMembersSettings;
  private final OperationCallSettings<CreateMembersInterconnectGroupRequest, Operation, Operation>
      createMembersOperationSettings;
  private final UnaryCallSettings<DeleteInterconnectGroupRequest, Operation> deleteSettings;
  private final OperationCallSettings<DeleteInterconnectGroupRequest, Operation, Operation>
      deleteOperationSettings;
  private final UnaryCallSettings<GetInterconnectGroupRequest, InterconnectGroup> getSettings;
  private final UnaryCallSettings<GetIamPolicyInterconnectGroupRequest, Policy>
      getIamPolicySettings;
  private final UnaryCallSettings<
          GetOperationalStatusInterconnectGroupRequest,
          InterconnectGroupsGetOperationalStatusResponse>
      getOperationalStatusSettings;
  private final UnaryCallSettings<InsertInterconnectGroupRequest, Operation> insertSettings;
  private final OperationCallSettings<InsertInterconnectGroupRequest, Operation, Operation>
      insertOperationSettings;
  private final PagedCallSettings<
          ListInterconnectGroupsRequest, InterconnectGroupsListResponse, ListPagedResponse>
      listSettings;
  private final UnaryCallSettings<PatchInterconnectGroupRequest, Operation> patchSettings;
  private final OperationCallSettings<PatchInterconnectGroupRequest, Operation, Operation>
      patchOperationSettings;
  private final UnaryCallSettings<SetIamPolicyInterconnectGroupRequest, Policy>
      setIamPolicySettings;
  private final UnaryCallSettings<
          TestIamPermissionsInterconnectGroupRequest, TestPermissionsResponse>
      testIamPermissionsSettings;
  // Describes how the paged `list` RPC threads page tokens / sizes through
  // requests and extracts resources from responses.
  private static final PagedListDescriptor<
          ListInterconnectGroupsRequest, InterconnectGroupsListResponse, InterconnectGroup>
      LIST_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListInterconnectGroupsRequest, InterconnectGroupsListResponse, InterconnectGroup>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListInterconnectGroupsRequest injectToken(
                ListInterconnectGroupsRequest payload, String token) {
              return ListInterconnectGroupsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListInterconnectGroupsRequest injectPageSize(
                ListInterconnectGroupsRequest payload, int pageSize) {
              // Compute uses max_results rather than page_size.
              return ListInterconnectGroupsRequest.newBuilder(payload)
                  .setMaxResults(pageSize)
                  .build();
            }

            @Override
            public Integer extractPageSize(ListInterconnectGroupsRequest payload) {
              return payload.getMaxResults();
            }

            @Override
            public String extractNextToken(InterconnectGroupsListResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<InterconnectGroup> extractResources(
                InterconnectGroupsListResponse payload) {
              return payload.getItemsList();
            }
          };
  // Factory that wraps a raw list response future into a ListPagedResponse
  // using the page descriptor above.
  private static final PagedListResponseFactory<
          ListInterconnectGroupsRequest, InterconnectGroupsListResponse, ListPagedResponse>
      LIST_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListInterconnectGroupsRequest, InterconnectGroupsListResponse, ListPagedResponse>() {
            @Override
            public ApiFuture<ListPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListInterconnectGroupsRequest, InterconnectGroupsListResponse>
                    callable,
                ListInterconnectGroupsRequest request,
                ApiCallContext context,
                ApiFuture<InterconnectGroupsListResponse> futureResponse) {
              PageContext<
                      ListInterconnectGroupsRequest,
                      InterconnectGroupsListResponse,
                      InterconnectGroup>
                  pageContext = PageContext.create(callable, LIST_PAGE_STR_DESC, request, context);
              return ListPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // Simple accessors exposing the immutable per-RPC settings configured above.

  /** Returns the object with the settings used for calls to createMembers. */
  public UnaryCallSettings<CreateMembersInterconnectGroupRequest, Operation>
      createMembersSettings() {
    return createMembersSettings;
  }

  /** Returns the object with the settings used for calls to createMembers. */
  public OperationCallSettings<CreateMembersInterconnectGroupRequest, Operation, Operation>
      createMembersOperationSettings() {
    return createMembersOperationSettings;
  }

  /** Returns the object with the settings used for calls to delete. */
  public UnaryCallSettings<DeleteInterconnectGroupRequest, Operation> deleteSettings() {
    return deleteSettings;
  }

  /** Returns the object with the settings used for calls to delete. */
  public OperationCallSettings<DeleteInterconnectGroupRequest, Operation, Operation>
      deleteOperationSettings() {
    return deleteOperationSettings;
  }

  /** Returns the object with the settings used for calls to get. */
  public UnaryCallSettings<GetInterconnectGroupRequest, InterconnectGroup> getSettings() {
    return getSettings;
  }

  /** Returns the object with the settings used for calls to getIamPolicy. */
  public UnaryCallSettings<GetIamPolicyInterconnectGroupRequest, Policy> getIamPolicySettings() {
    return getIamPolicySettings;
  }

  /** Returns the object with the settings used for calls to getOperationalStatus. */
  public UnaryCallSettings<
          GetOperationalStatusInterconnectGroupRequest,
          InterconnectGroupsGetOperationalStatusResponse>
      getOperationalStatusSettings() {
    return getOperationalStatusSettings;
  }

  /** Returns the object with the settings used for calls to insert. */
  public UnaryCallSettings<InsertInterconnectGroupRequest, Operation> insertSettings() {
    return insertSettings;
  }

  /** Returns the object with the settings used for calls to insert. */
  public OperationCallSettings<InsertInterconnectGroupRequest, Operation, Operation>
      insertOperationSettings() {
    return insertOperationSettings;
  }

  /** Returns the object with the settings used for calls to list. */
  public PagedCallSettings<
          ListInterconnectGroupsRequest, InterconnectGroupsListResponse, ListPagedResponse>
      listSettings() {
    return listSettings;
  }

  /** Returns the object with the settings used for calls to patch. */
  public UnaryCallSettings<PatchInterconnectGroupRequest, Operation> patchSettings() {
    return patchSettings;
  }

  /** Returns the object with the settings used for calls to patch. */
  public OperationCallSettings<PatchInterconnectGroupRequest, Operation, Operation>
      patchOperationSettings() {
    return patchOperationSettings;
  }

  /** Returns the object with the settings used for calls to setIamPolicy. */
  public UnaryCallSettings<SetIamPolicyInterconnectGroupRequest, Policy> setIamPolicySettings() {
    return setIamPolicySettings;
  }

  /** Returns the object with the settings used for calls to testIamPermissions. */
  public UnaryCallSettings<TestIamPermissionsInterconnectGroupRequest, TestPermissionsResponse>
      testIamPermissionsSettings() {
    return testIamPermissionsSettings;
  }
  /**
   * Creates the transport-specific stub for these settings.
   *
   * <p>Only the HTTP/JSON transport is supported for this service; any other configured transport
   * results in an {@link UnsupportedOperationException}.
   *
   * @throws IOException if the stub cannot be created
   */
  public InterconnectGroupsStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonInterconnectGroupsStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }
  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "compute";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "compute.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "compute.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  /** Returns the default (HTTP/JSON) TransportChannelProvider for this service. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultHttpJsonTransportProviderBuilder().build();
  }

  /** Returns the default ApiClientHeaderProvider builder, carrying GAPIC and transport tokens. */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(InterconnectGroupsStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  /**
   * Builds an immutable settings instance by snapshotting every per-RPC settings builder.
   *
   * @param settingsBuilder the source builder whose values are frozen into this instance
   * @throws IOException propagated from the {@link StubSettings} superclass constructor
   */
  protected InterconnectGroupsStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    createMembersSettings = settingsBuilder.createMembersSettings().build();
    createMembersOperationSettings = settingsBuilder.createMembersOperationSettings().build();
    deleteSettings = settingsBuilder.deleteSettings().build();
    deleteOperationSettings = settingsBuilder.deleteOperationSettings().build();
    getSettings = settingsBuilder.getSettings().build();
    getIamPolicySettings = settingsBuilder.getIamPolicySettings().build();
    getOperationalStatusSettings = settingsBuilder.getOperationalStatusSettings().build();
    insertSettings = settingsBuilder.insertSettings().build();
    insertOperationSettings = settingsBuilder.insertOperationSettings().build();
    listSettings = settingsBuilder.listSettings().build();
    patchSettings = settingsBuilder.patchSettings().build();
    patchOperationSettings = settingsBuilder.patchOperationSettings().build();
    setIamPolicySettings = settingsBuilder.setIamPolicySettings().build();
    testIamPermissionsSettings = settingsBuilder.testIamPermissionsSettings().build();
  }
/** Builder for InterconnectGroupsStubSettings. */
public static class Builder
extends StubSettings.Builder<InterconnectGroupsStubSettings, Builder> {
    // All unary settings builders, collected so applyToAllUnaryMethods-style
    // operations can iterate over them uniformly.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;

    // Mutable counterparts of the per-RPC settings fields on the outer class.
    private final UnaryCallSettings.Builder<CreateMembersInterconnectGroupRequest, Operation>
        createMembersSettings;
    private final OperationCallSettings.Builder<
            CreateMembersInterconnectGroupRequest, Operation, Operation>
        createMembersOperationSettings;
    private final UnaryCallSettings.Builder<DeleteInterconnectGroupRequest, Operation>
        deleteSettings;
    private final OperationCallSettings.Builder<
            DeleteInterconnectGroupRequest, Operation, Operation>
        deleteOperationSettings;
    private final UnaryCallSettings.Builder<GetInterconnectGroupRequest, InterconnectGroup>
        getSettings;
    private final UnaryCallSettings.Builder<GetIamPolicyInterconnectGroupRequest, Policy>
        getIamPolicySettings;
    private final UnaryCallSettings.Builder<
            GetOperationalStatusInterconnectGroupRequest,
            InterconnectGroupsGetOperationalStatusResponse>
        getOperationalStatusSettings;
    private final UnaryCallSettings.Builder<InsertInterconnectGroupRequest, Operation>
        insertSettings;
    private final OperationCallSettings.Builder<
            InsertInterconnectGroupRequest, Operation, Operation>
        insertOperationSettings;
    private final PagedCallSettings.Builder<
            ListInterconnectGroupsRequest, InterconnectGroupsListResponse, ListPagedResponse>
        listSettings;
    private final UnaryCallSettings.Builder<PatchInterconnectGroupRequest, Operation> patchSettings;
    private final OperationCallSettings.Builder<PatchInterconnectGroupRequest, Operation, Operation>
        patchOperationSettings;
    private final UnaryCallSettings.Builder<SetIamPolicyInterconnectGroupRequest, Policy>
        setIamPolicySettings;
    private final UnaryCallSettings.Builder<
            TestIamPermissionsInterconnectGroupRequest, TestPermissionsResponse>
        testIamPermissionsSettings;
    // Named retry-code sets referenced by initDefaults:
    //   no_retry_1_codes    - no retryable status codes (non-idempotent methods)
    //   retry_policy_0_codes - retry on DEADLINE_EXCEEDED and UNAVAILABLE (idempotent reads)
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named RetrySettings paired with the code sets above; both profiles use a
    // 600s RPC/total timeout, and retry_policy_0 adds exponential retry delays
    // (100ms initial, 1.3x multiplier, 60s cap).
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeoutDuration(Duration.ofMillis(600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(600000L))
              .setTotalTimeoutDuration(Duration.ofMillis(600000L))
              .build();
      definitions.put("no_retry_1_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(60000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(600000L))
              .setTotalTimeoutDuration(Duration.ofMillis(600000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }
    protected Builder() {
      this(((ClientContext) null));
    }

    /**
     * Creates fresh settings builders for every RPC, registers the unary ones for bulk
     * configuration, and applies the generator defaults via {@code initDefaults}.
     */
    protected Builder(ClientContext clientContext) {
      super(clientContext);

      createMembersSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createMembersOperationSettings = OperationCallSettings.newBuilder();
      deleteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteOperationSettings = OperationCallSettings.newBuilder();
      getSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getOperationalStatusSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      insertSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      insertOperationSettings = OperationCallSettings.newBuilder();
      listSettings = PagedCallSettings.newBuilder(LIST_PAGE_STR_FACT);
      patchSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      patchOperationSettings = OperationCallSettings.newBuilder();
      setIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      testIamPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      // Note: operation (LRO) settings builders are intentionally excluded here;
      // this list covers only the unary/paged call settings.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createMembersSettings,
              deleteSettings,
              getSettings,
              getIamPolicySettings,
              getOperationalStatusSettings,
              insertSettings,
              listSettings,
              patchSettings,
              setIamPolicySettings,
              testIamPermissionsSettings);
      initDefaults(this);
    }
    /**
     * Copy constructor used by {@code toBuilder()}: re-opens every frozen settings object as a
     * builder so an existing settings instance can be modified.
     */
    protected Builder(InterconnectGroupsStubSettings settings) {
      super(settings);

      createMembersSettings = settings.createMembersSettings.toBuilder();
      createMembersOperationSettings = settings.createMembersOperationSettings.toBuilder();
      deleteSettings = settings.deleteSettings.toBuilder();
      deleteOperationSettings = settings.deleteOperationSettings.toBuilder();
      getSettings = settings.getSettings.toBuilder();
      getIamPolicySettings = settings.getIamPolicySettings.toBuilder();
      getOperationalStatusSettings = settings.getOperationalStatusSettings.toBuilder();
      insertSettings = settings.insertSettings.toBuilder();
      insertOperationSettings = settings.insertOperationSettings.toBuilder();
      listSettings = settings.listSettings.toBuilder();
      patchSettings = settings.patchSettings.toBuilder();
      patchOperationSettings = settings.patchOperationSettings.toBuilder();
      setIamPolicySettings = settings.setIamPolicySettings.toBuilder();
      testIamPermissionsSettings = settings.testIamPermissionsSettings.toBuilder();

      // Same unary/paged registration as the default constructor (no LRO builders).
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createMembersSettings,
              deleteSettings,
              getSettings,
              getIamPolicySettings,
              getOperationalStatusSettings,
              insertSettings,
              listSettings,
              patchSettings,
              setIamPolicySettings,
              testIamPermissionsSettings);
    }
    /**
     * Builds a new Builder pre-populated with the service defaults: default
     * transport channel, credentials, API client headers, mTLS endpoint (with
     * automatic switch-to-mTLS allowed), and the per-method retry/LRO defaults
     * installed by {@link #initDefaults(Builder)}.
     */
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }
    /**
     * Installs the generated per-RPC retry defaults and the long-running
     * operation polling defaults on {@code builder}, then returns it.
     *
     * <p>Retry-policy keys used below: {@code createMembers}, {@code delete},
     * {@code insert}, {@code patch}, {@code setIamPolicy} and
     * {@code testIamPermissions} use "no_retry_1_*"; {@code get},
     * {@code getIamPolicy}, {@code getOperationalStatus} and {@code list} use
     * "retry_policy_0_*". All four LRO methods poll with the same schedule:
     * initial delay 500ms, multiplier 1.5, max delay 20s, total timeout 600s.
     */
    private static Builder initDefaults(Builder builder) {
      // Unary RPC retry configuration.
      builder
          .createMembersSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .deleteSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .getSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getIamPolicySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getOperationalStatusSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .insertSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .listSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .patchSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .setIamPolicySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .testIamPermissionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      // Long-running operation configuration: the initial call is never
      // retried; the poll algorithm defines the delay/timeout schedule above.
      builder
          .createMembersOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<CreateMembersInterconnectGroupRequest, OperationSnapshot>
                      newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(20000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(600000L))
                      .build()));

      builder
          .deleteOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<DeleteInterconnectGroupRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(20000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(600000L))
                      .build()));

      builder
          .insertOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<InsertInterconnectGroupRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(20000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(600000L))
                      .build()));

      builder
          .patchOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<PatchInterconnectGroupRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(20000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(600000L))
                      .build()));

      return builder;
    }
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods, and it does not
     * touch the long-running operation settings builders (only the unary call settings registered in
     * the constructor are updated).
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    /** Returns the (immutable) list of per-method unary call settings builders. */
    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }
    /** Returns the builder for the settings used for calls to createMembers. */
    public UnaryCallSettings.Builder<CreateMembersInterconnectGroupRequest, Operation>
        createMembersSettings() {
      return createMembersSettings;
    }

    /** Returns the builder for the settings used for the createMembers long-running operation. */
    public OperationCallSettings.Builder<
            CreateMembersInterconnectGroupRequest, Operation, Operation>
        createMembersOperationSettings() {
      return createMembersOperationSettings;
    }

    /** Returns the builder for the settings used for calls to delete. */
    public UnaryCallSettings.Builder<DeleteInterconnectGroupRequest, Operation> deleteSettings() {
      return deleteSettings;
    }

    /** Returns the builder for the settings used for the delete long-running operation. */
    public OperationCallSettings.Builder<DeleteInterconnectGroupRequest, Operation, Operation>
        deleteOperationSettings() {
      return deleteOperationSettings;
    }

    /** Returns the builder for the settings used for calls to get. */
    public UnaryCallSettings.Builder<GetInterconnectGroupRequest, InterconnectGroup> getSettings() {
      return getSettings;
    }

    /** Returns the builder for the settings used for calls to getIamPolicy. */
    public UnaryCallSettings.Builder<GetIamPolicyInterconnectGroupRequest, Policy>
        getIamPolicySettings() {
      return getIamPolicySettings;
    }

    /** Returns the builder for the settings used for calls to getOperationalStatus. */
    public UnaryCallSettings.Builder<
            GetOperationalStatusInterconnectGroupRequest,
            InterconnectGroupsGetOperationalStatusResponse>
        getOperationalStatusSettings() {
      return getOperationalStatusSettings;
    }

    /** Returns the builder for the settings used for calls to insert. */
    public UnaryCallSettings.Builder<InsertInterconnectGroupRequest, Operation> insertSettings() {
      return insertSettings;
    }

    /** Returns the builder for the settings used for the insert long-running operation. */
    public OperationCallSettings.Builder<InsertInterconnectGroupRequest, Operation, Operation>
        insertOperationSettings() {
      return insertOperationSettings;
    }

    /** Returns the builder for the settings used for calls to list (paged). */
    public PagedCallSettings.Builder<
            ListInterconnectGroupsRequest, InterconnectGroupsListResponse, ListPagedResponse>
        listSettings() {
      return listSettings;
    }

    /** Returns the builder for the settings used for calls to patch. */
    public UnaryCallSettings.Builder<PatchInterconnectGroupRequest, Operation> patchSettings() {
      return patchSettings;
    }

    /** Returns the builder for the settings used for the patch long-running operation. */
    public OperationCallSettings.Builder<PatchInterconnectGroupRequest, Operation, Operation>
        patchOperationSettings() {
      return patchOperationSettings;
    }

    /** Returns the builder for the settings used for calls to setIamPolicy. */
    public UnaryCallSettings.Builder<SetIamPolicyInterconnectGroupRequest, Policy>
        setIamPolicySettings() {
      return setIamPolicySettings;
    }

    /** Returns the builder for the settings used for calls to testIamPermissions. */
    public UnaryCallSettings.Builder<
            TestIamPermissionsInterconnectGroupRequest, TestPermissionsResponse>
        testIamPermissionsSettings() {
      return testIamPermissionsSettings;
    }
    /** Builds an immutable {@code InterconnectGroupsStubSettings} snapshot of this builder. */
    @Override
    public InterconnectGroupsStubSettings build() throws IOException {
      return new InterconnectGroupsStubSettings(this);
    }
}
}
|
apache/olingo-odata4 | 36,648 | lib/server-test/src/test/java/org/apache/olingo/server/core/serializer/json/JsonDeltaSerializerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.olingo.server.core.serializer.json;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.olingo.commons.api.data.ContextURL;
import org.apache.olingo.commons.api.data.DeletedEntity;
import org.apache.olingo.commons.api.data.DeletedEntity.Reason;
import org.apache.olingo.commons.api.data.Delta;
import org.apache.olingo.commons.api.data.DeltaLink;
import org.apache.olingo.commons.api.data.Entity;
import org.apache.olingo.commons.api.data.ContextURL.Suffix;
import org.apache.olingo.commons.api.edm.EdmEntityContainer;
import org.apache.olingo.commons.api.edm.EdmEntitySet;
import org.apache.olingo.commons.api.edm.EdmEntityType;
import org.apache.olingo.commons.api.edmx.EdmxReference;
import org.apache.olingo.commons.api.format.ContentType;
import org.apache.olingo.server.api.OData;
import org.apache.olingo.server.api.ServiceMetadata;
import org.apache.olingo.server.api.serializer.EdmDeltaSerializer;
import org.apache.olingo.server.api.serializer.EntityCollectionSerializerOptions;
import org.apache.olingo.server.api.serializer.SerializerException;
import org.apache.olingo.server.api.uri.UriHelper;
import org.apache.olingo.server.api.uri.queryoption.CountOption;
import org.apache.olingo.server.api.uri.queryoption.ExpandOption;
import org.apache.olingo.server.api.uri.queryoption.SelectOption;
import org.apache.olingo.server.core.serializer.ExpandSelectMock;
import org.apache.olingo.server.tecsvc.MetadataETagSupport;
import org.apache.olingo.server.tecsvc.data.DataProvider;
import org.apache.olingo.server.tecsvc.provider.EdmTechProvider;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
public class JsonDeltaSerializerTest {
  // Serializer under test; created in the constructor for OData version 4.0.
  final EdmDeltaSerializer ser;

  private static final OData odata = OData.newInstance();
  // Technical-service EDM with a fixed metadata ETag so context URLs are stable.
  private static final ServiceMetadata metadata = odata.createServiceMetadata(
      new EdmTechProvider(), Collections.<EdmxReference> emptyList(), new MetadataETagSupport("W/\"metadataETag\""));
  private static final EdmEntityContainer entityContainer = metadata.getEdm().getEntityContainer();
  // Supplies sample entities (ESDelta, ESAllPrim, ...) used as delta payload content.
  private final DataProvider data = new DataProvider(odata, metadata.getEdm());

  /** Creates the JSON delta serializer for OData version "4.0". */
  public JsonDeltaSerializerTest() throws SerializerException {
    List<String> versions = new ArrayList<String>();
    versions.add("4.0");
    ser = OData.newInstance().createEdmDeltaSerializer(ContentType.JSON, versions);
  }
@Test
public void addedDeltaLink() throws Exception {
final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESDelta");
Delta delta = new Delta();
List<DeltaLink> addedLinks = new ArrayList<DeltaLink>();
DeltaLink link1 = new DeltaLink();
link1.setRelationship("NavPropertyETAllPrimOne");
link1.setSource(new URI("ESDelta(100)"));
link1.setTarget(new URI("ESAllPrim(0)"));
addedLinks.add(link1 );
delta.getAddedLinks().addAll(addedLinks );
InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
EntityCollectionSerializerOptions.with()
.contextURL(ContextURL.with().entitySet(edmEntitySet).build())
.build()).getContent();
String jsonString = IOUtils.toString(stream);
final String expectedResult = "{"
+ "\"@odata.context\":\"$metadata#ESDelta/$delta\",\"value\":[{"
+ "\"@odata.context\":\"#ESDelta/$link\",\"source\":\"ESDelta(100)\","
+ "\"relationship\":\"NavPropertyETAllPrimOne\","
+ "\"target\":\"ESAllPrim(0)\"}]"
+ "}";
Assert.assertNotNull(jsonString);
Assert.assertEquals(expectedResult, jsonString);
}
@Test
public void deletedDeltaLink() throws Exception {
final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESDelta");
Delta delta = new Delta();
List<DeltaLink> deletedLinks = new ArrayList<DeltaLink>();
DeltaLink link1 = new DeltaLink();
link1.setRelationship("NavPropertyETAllPrimOne");
link1.setSource(new URI("ESDelta(100)"));
link1.setTarget(new URI("ESAllPrim(0)"));
deletedLinks.add(link1 );
delta.getDeletedLinks().addAll(deletedLinks);
InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
EntityCollectionSerializerOptions.with()
.contextURL(ContextURL.with().entitySet(edmEntitySet).build())
.build()).getContent();
String jsonString = IOUtils.toString(stream);
final String expectedResult = "{"
+ "\"@odata.context\":\"$metadata#ESDelta/$delta\",\"value\":[{"
+ "\"@odata.context\":\"#ESDelta/$deletedLink\",\"source\":\"ESDelta(100)\","
+ "\"relationship\":\"NavPropertyETAllPrimOne\","
+ "\"target\":\"ESAllPrim(0)\"}]"
+ "}";
Assert.assertNotNull(jsonString);
Assert.assertEquals(expectedResult, jsonString);
}
@Test
public void deletedEntity() throws Exception {
final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESDelta");
Delta delta = new Delta();
List<DeletedEntity> deletedEntity = new ArrayList<DeletedEntity>();
DeletedEntity entity1 = new DeletedEntity();
entity1.setId(new URI("ESDelta(100)"));
entity1.setReason(Reason.deleted);
DeletedEntity entity2 = new DeletedEntity();
entity2.setId(new URI("ESDelta(-32768)"));
entity2.setReason(Reason.changed);
deletedEntity.add(entity1);
deletedEntity.add(entity2);
delta.getDeletedEntities().addAll(deletedEntity);
InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
EntityCollectionSerializerOptions.with()
.contextURL(ContextURL.with().entitySet(edmEntitySet).build())
.build()).getContent();
String jsonString = IOUtils.toString(stream);
final String expectedResult = "{"
+"\"@odata.context\":\"$metadata#ESDelta/$delta\",\"value\":[{"
+ "\"@odata.context\":\"#ESDelta(100)/$deletedEntity\","
+ "\"id\":\"ESDelta(100)\",\"reason\":\"deleted\"},{"
+ "\"@odata.context\":\"#ESDelta(-32768)/$deletedEntity\","
+ "\"id\":\"ESDelta(-32768)\",\"reason\":\"changed\"}]"
+ "}";
Assert.assertNotNull(jsonString);
Assert.assertEquals(expectedResult, jsonString);
}
@Test
public void addedChangedDeltaEntity() throws Exception {
final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESDelta");
Delta delta = new Delta();
final Entity entity = data.readAll(edmEntitySet).getEntities().get(0);
final Entity entity2 = data.readAll(edmEntitySet).getEntities().get(1);
List<Entity> addedEntity = new ArrayList<Entity>();
Entity changedEntity = new Entity();
changedEntity.setId(entity2.getId());
changedEntity.addProperty(entity2.getProperty("PropertyString"));
addedEntity.add(entity);
addedEntity.add(changedEntity);
delta.getEntities().addAll(addedEntity);
InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
EntityCollectionSerializerOptions.with()
.contextURL(ContextURL.with().entitySet(edmEntitySet).build())
.build()).getContent();
String jsonString = IOUtils.toString(stream);
final String expectedResult = "{"
+ "\"@odata.context\":\"$metadata#ESDelta/$delta\",\"value\":[{"
+ "\"@odata.id\":\"ESDelta(32767)\",\"PropertyInt16\":32767,"
+ "\"PropertyString\":\"Number:32767\"},{\"@odata.id\":\"ESDelta(-32768)\","
+ "\"PropertyString\":\"Number:-32768\"}]"
+ "}";
Assert.assertNotNull(jsonString);
Assert.assertEquals(expectedResult, jsonString);
}
  /**
   * End-to-end delta payload combining every change kind at once: an added
   * link, a deleted link, two deleted-entity tombstones, one added entity and
   * one changed entity. The expected output shows the serialization order:
   * entities first, then deleted entities, then added links, then deleted links.
   */
  @Test
  public void basicDeltaTest() throws Exception {
    final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESDelta");
    Delta delta = new Delta();
    // Added link.
    List<DeltaLink> addedLinks = new ArrayList<DeltaLink>();
    DeltaLink link1 = new DeltaLink();
    link1.setRelationship("NavPropertyETAllPrimOne");
    link1.setSource(new URI("ESDelta(100)"));
    link1.setTarget(new URI("ESAllPrim(0)"));
    addedLinks.add(link1 );
    delta.getAddedLinks().addAll(addedLinks );
    // Deleted link (same endpoints as the added one).
    List<DeltaLink> deletedLinks = new ArrayList<DeltaLink>();
    DeltaLink delLink = new DeltaLink();
    delLink.setRelationship("NavPropertyETAllPrimOne");
    delLink.setSource(new URI("ESDelta(100)"));
    delLink.setTarget(new URI("ESAllPrim(0)"));
    deletedLinks.add(delLink );
    delta.getDeletedLinks().addAll(deletedLinks);
    // Two tombstones with different reasons.
    List<DeletedEntity> deletedEntity = new ArrayList<DeletedEntity>();
    DeletedEntity delEntity1 = new DeletedEntity();
    delEntity1.setId(new URI("ESDelta(100)"));
    delEntity1.setReason(Reason.deleted);
    DeletedEntity delEntity2 = new DeletedEntity();
    delEntity2.setId(new URI("ESDelta(-32768)"));
    delEntity2.setReason(Reason.changed);
    deletedEntity.add(delEntity1);
    deletedEntity.add(delEntity2);
    delta.getDeletedEntities().addAll(deletedEntity);
    // One added entity (full) and one changed entity (id + changed property only).
    final Entity entity = data.readAll(edmEntitySet).getEntities().get(0);
    final Entity entity2 = data.readAll(edmEntitySet).getEntities().get(1);
    List<Entity> addedEntity = new ArrayList<Entity>();
    Entity changedEntity = new Entity();
    changedEntity.setId(entity2.getId());
    changedEntity.addProperty(entity2.getProperty("PropertyString"));
    addedEntity.add(entity);
    addedEntity.add(changedEntity);
    delta.getEntities().addAll(addedEntity);
    InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
        EntityCollectionSerializerOptions.with()
            .contextURL(ContextURL.with().entitySet(edmEntitySet).build())
            .build()).getContent();
    String jsonString = IOUtils.toString(stream);
    final String expectedResult = "{"
        +"\"@odata.context\":\"$metadata#ESDelta/$delta\",\"value\":[{"
        + "\"@odata.id\":\"ESDelta(32767)\",\"PropertyInt16\":32767,"
        + "\"PropertyString\":\"Number:32767\"},{\"@odata.id\":\"ESDelta(-32768)\","
        + "\"PropertyString\":\"Number:-32768\"},{\"@odata.context\":\"#ESDelta(100)/$deletedEntity\","
        + "\"id\":\"ESDelta(100)\",\"reason\":\"deleted\"},"
        + "{\"@odata.context\":\"#ESDelta(-32768)/$deletedEntity\",\"id\":\"ESDelta(-32768)\","
        + "\"reason\":\"changed\"},{\"@odata.context\":\"#ESDelta/$link\",\"source\":\"ESDelta(100)\","
        + "\"relationship\":\"NavPropertyETAllPrimOne\",\"target\":\"ESAllPrim(0)\"},{\"@odata.context\":"
        + "\"#ESDelta/$deletedLink\",\"source\":\"ESDelta(100)\",\"relationship\":\"NavPropertyETAllPrimOne\","
        + "\"target\":\"ESAllPrim(0)\"}]"
        + "}";
    Assert.assertNotNull(jsonString);
    Assert.assertEquals(expectedResult, jsonString);
  }
@Test
public void addedDifferentdDeltaEntity() throws Exception {
final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESAllPrim");
final EdmEntitySet edmEntitySet2 = entityContainer.getEntitySet("ESDelta");
Delta delta = new Delta();
final Entity entity = data.readAll(edmEntitySet).getEntities().get(0);
List<Entity> addedEntity = new ArrayList<Entity>();
addedEntity.add(entity);
delta.getEntities().addAll(addedEntity);
InputStream stream = ser.entityCollection(metadata, edmEntitySet2.getEntityType(), delta ,
EntityCollectionSerializerOptions.with()
.contextURL(ContextURL.with().entitySet(edmEntitySet2).build())
.build()).getContent();
String jsonString = IOUtils.toString(stream);
final String expectedResult = "{"
+"\"@odata.context\":\"$metadata#ESDelta/$delta\",\"value\":[{"
+ "\"@odata.context\":\"#ESAllPrim/$entity\",\"@odata.id\":\"ESAllPrim(32767)\","
+ "\"PropertyInt16\":32767,\"PropertyString\":\"First Resource - positive values\"}]"
+ "}";
Assert.assertNotNull(jsonString);
Assert.assertEquals(expectedResult, jsonString);
}
@Test
public void testDeltaToken() throws Exception {
final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESDelta");
Delta delta = new Delta();
List<DeltaLink> addedLinks = new ArrayList<DeltaLink>();
DeltaLink link1 = new DeltaLink();
link1.setRelationship("NavPropertyETAllPrimOne");
link1.setSource(new URI("ESDelta(100)"));
link1.setTarget(new URI("ESAllPrim(0)"));
addedLinks.add(link1 );
delta.getAddedLinks().addAll(addedLinks );
delta.setDeltaLink(new URI("23042017"));
InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
EntityCollectionSerializerOptions.with()
.contextURL(ContextURL.with().entitySet(edmEntitySet).build())
.build()).getContent();
String jsonString = IOUtils.toString(stream);
final String expectedResult = "{"
+ "\"@odata.context\":\"$metadata#ESDelta/$delta\",\"value\":[{"
+ "\"@odata.context\":\"#ESDelta/$link\",\"source\":\"ESDelta(100)\","
+ "\"relationship\":\"NavPropertyETAllPrimOne\","
+ "\"target\":\"ESAllPrim(0)\"}],"
+ "\"@odata.deltaLink\":\"23042017\""
+ "}";
Assert.assertNotNull(jsonString);
Assert.assertEquals(expectedResult, jsonString);
}
@Test
public void testSkipToken() throws Exception {
final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESDelta");
Delta delta = new Delta();
List<DeltaLink> addedLinks = new ArrayList<DeltaLink>();
DeltaLink link1 = new DeltaLink();
link1.setRelationship("NavPropertyETAllPrimOne");
link1.setSource(new URI("ESDelta(100)"));
link1.setTarget(new URI("ESAllPrim(0)"));
addedLinks.add(link1 );
delta.getAddedLinks().addAll(addedLinks );
delta.setNext(new URI("23042017"));
InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
EntityCollectionSerializerOptions.with()
.contextURL(ContextURL.with().entitySet(edmEntitySet).build())
.build()).getContent();
String jsonString = IOUtils.toString(stream);
final String expectedResult = "{"
+ "\"@odata.context\":\"$metadata#ESDelta/$delta\",\"value\":[{"
+ "\"@odata.context\":\"#ESDelta/$link\",\"source\":\"ESDelta(100)\","
+ "\"relationship\":\"NavPropertyETAllPrimOne\","
+ "\"target\":\"ESAllPrim(0)\"}],"
+ "\"@odata.nextLink\":\"23042017\""
+ "}";
Assert.assertNotNull(jsonString);
Assert.assertEquals(expectedResult, jsonString);
}
  /**
   * When both a next link and a delta link are set, only "@odata.nextLink" is
   * serialized — the expected output below contains no "@odata.deltaLink",
   * i.e. the next link takes precedence for an incomplete delta response.
   */
  @Test
  public void testSkipDeltaToken() throws Exception {
    final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESDelta");
    Delta delta = new Delta();
    List<DeltaLink> addedLinks = new ArrayList<DeltaLink>();
    DeltaLink link1 = new DeltaLink();
    link1.setRelationship("NavPropertyETAllPrimOne");
    link1.setSource(new URI("ESDelta(100)"));
    link1.setTarget(new URI("ESAllPrim(0)"));
    addedLinks.add(link1 );
    delta.getAddedLinks().addAll(addedLinks );
    // Both tokens set: only the next link should survive serialization.
    delta.setNext(new URI("23042017"));
    delta.setDeltaLink(new URI("02052017"));
    InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
        EntityCollectionSerializerOptions.with()
            .contextURL(ContextURL.with().entitySet(edmEntitySet).build())
            .build()).getContent();
    String jsonString = IOUtils.toString(stream);
    final String expectedResult = "{"
        + "\"@odata.context\":\"$metadata#ESDelta/$delta\",\"value\":[{"
        + "\"@odata.context\":\"#ESDelta/$link\",\"source\":\"ESDelta(100)\","
        + "\"relationship\":\"NavPropertyETAllPrimOne\","
        + "\"target\":\"ESAllPrim(0)\"}],"
        + "\"@odata.nextLink\":\"23042017\""
        + "}";
    Assert.assertNotNull(jsonString);
    Assert.assertEquals(expectedResult, jsonString);
  }
@Test
public void testDeltaCount() throws Exception {
final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESDelta");
CountOption countOption = Mockito.mock(CountOption.class);
Mockito.when(countOption.getValue()).thenReturn(true);
Delta delta = new Delta();
delta.setCount(1);
List<DeltaLink> addedLinks = new ArrayList<DeltaLink>();
DeltaLink link1 = new DeltaLink();
link1.setRelationship("NavPropertyETAllPrimOne");
link1.setSource(new URI("ESDelta(100)"));
link1.setTarget(new URI("ESAllPrim(0)"));
addedLinks.add(link1 );
delta.getAddedLinks().addAll(addedLinks );
delta.setDeltaLink(new URI("23042017"));
InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
EntityCollectionSerializerOptions.with()
.contextURL(ContextURL.with().entitySet(edmEntitySet).build())
.count(countOption)
.build()).getContent();
String jsonString = IOUtils.toString(stream);
final String expectedResult = "{"
+ "\"@odata.context\":\"$metadata#ESDelta/$delta\","
+ "\"@odata.count\":\"1\","
+ "\"value\":[{"
+ "\"@odata.context\":\"#ESDelta/$link\",\"source\":\"ESDelta(100)\","
+ "\"relationship\":\"NavPropertyETAllPrimOne\","
+ "\"target\":\"ESAllPrim(0)\"}],"
+ "\"@odata.deltaLink\":\"23042017\""
+ "}";
Assert.assertNotNull(jsonString);
Assert.assertEquals(expectedResult, jsonString);
}
@Test
public void testEmptyDelta() throws Exception {
final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESDelta");
Delta delta = new Delta();
InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
EntityCollectionSerializerOptions.with()
.contextURL(ContextURL.with().entitySet(edmEntitySet).build())
.build()).getContent();
String jsonString = IOUtils.toString(stream);
final String expectedResult = "{"
+ "\"@odata.context\":\"$metadata#ESDelta/$delta\",\"value\":[]"
+ "}";
Assert.assertNotNull(jsonString);
Assert.assertEquals(expectedResult, jsonString);
}
@Test
public void testDeltaForStream() throws Exception {
final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESWithStream");
Delta delta = new Delta();
List<DeltaLink> addedLinks = new ArrayList<DeltaLink>();
DeltaLink link1 = new DeltaLink();
link1.setRelationship("NavPropertyETAllPrimOne");
link1.setSource(new URI("ESDelta(100)"));
link1.setTarget(new URI("ESAllPrim(0)"));
addedLinks.add(link1 );
delta.getAddedLinks().addAll(addedLinks );
InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
EntityCollectionSerializerOptions.with()
.contextURL(ContextURL.with().entitySet(edmEntitySet).build())
.build()).getContent();
String jsonString = IOUtils.toString(stream);
final String expectedResult = "{"
+ "\"@odata.context\":\"$metadata#ESWithStream/$delta\","
+ "\"value\":[{\"@odata.context\":\"#ESWithStream/$link\","
+ "\"source\":\"ESDelta(100)\",\"relationship\":\"NavPropertyETAllPrimOne\","
+ "\"target\":\"ESAllPrim(0)\"}]"
+ "}";
Assert.assertNotNull(jsonString);
Assert.assertEquals(expectedResult, jsonString);
}
  /**
   * $select applied to a delta: only the selected property (plus the key,
   * which is always kept) appears per entity, and the context URL carries the
   * select list "(PropertyInt16,PropertyString)".
   *
   * <p>NOTE(review): the select item is mocked against the ESAllPrim entity
   * set while the payload is serialized for ESDelta — presumably only the
   * property name matters to the mock; confirm against ExpandSelectMock.
   */
  @Test
  public void selectInDelta() throws Exception {
    final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESDelta");
    final EdmEntityType entityType = edmEntitySet.getEntityType();
    final UriHelper helper = odata.createUriHelper();
    final SelectOption select = ExpandSelectMock.mockSelectOption(Collections.singletonList(
        ExpandSelectMock.mockSelectItem(entityContainer.getEntitySet("ESAllPrim"), "PropertyString")));
    final Entity entity = data.readAll(edmEntitySet).getEntities().get(0);
    final Entity entity2 = data.readAll(edmEntitySet).getEntities().get(1);
    Delta delta = new Delta();
    List<Entity> addedEntity = new ArrayList<Entity>();
    // Changed entity carries both properties; $select should filter to
    // PropertyString in the output.
    Entity changedEntity = new Entity();
    changedEntity.setId(entity2.getId());
    changedEntity.addProperty(entity2.getProperty("PropertyString"));
    changedEntity.addProperty(entity2.getProperty("PropertyInt16"));
    addedEntity.add(entity);
    addedEntity.add(changedEntity);
    delta.getEntities().addAll(addedEntity);
    InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
        EntityCollectionSerializerOptions.with()
            .contextURL(ContextURL.with().entitySet(edmEntitySet)
                .selectList(helper.buildContextURLSelectList(entityType, null, select))
                .suffix(Suffix.ENTITY).build())
            .select(select).build()).getContent();
    String jsonString = IOUtils.toString(stream);
    Assert.assertEquals("{"
        +"\"@odata.context\":\"$metadata#ESDelta(PropertyInt16,PropertyString)/$entity/$delta\","
        + "\"value\":[{\"@odata.id\":\"ESDelta(32767)\",\"PropertyString\":\"Number:32767\"},"
        + "{\"@odata.id\":\"ESDelta(-32768)\",\"PropertyString\":\"Number:-32768\"}]}",
        jsonString);
  }
  /**
   * An added entity whose type consists of collection-valued primitive
   * properties (ESCollAllPrim) is serialized in full inside the delta,
   * followed by the added link with its $link context annotation.
   */
  @Test
  public void testCollPropertyInDelta() throws Exception {
    final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESCollAllPrim");
    final Entity entity = data.readAll(edmEntitySet).getEntities().get(0);
    Delta delta = new Delta();
    List<DeltaLink> addedLinks = new ArrayList<DeltaLink>();
    List<Entity> addedEntities = new ArrayList<Entity>();
    DeltaLink link1 = new DeltaLink();
    link1.setRelationship("NavPropertyETAllPrimOne");
    link1.setSource(new URI("ESDelta(100)"));
    link1.setTarget(new URI("ESAllPrim(0)"));
    addedLinks.add(link1 );
    addedEntities.add(entity);
    delta.getAddedLinks().addAll(addedLinks );
    delta.getEntities().addAll(addedEntities);
    InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
        EntityCollectionSerializerOptions.with()
            .contextURL(ContextURL.with().entitySet(edmEntitySet).build())
            .build()).getContent();
    String jsonString = IOUtils.toString(stream);
    // Entity (with all collection properties) comes first, then the link.
    final String expectedResult = "{"
        + "\"@odata.context\":\"$metadata#ESCollAllPrim/$delta\","
        + "\"value\":[{\"@odata.id\":\"ESCollAllPrim(1)\",\"PropertyInt16\":1,"
        + "\"CollPropertyString\":[\"Employee1@company.example\",\"Employee2@company.example\","
        + "\"Employee3@company.example\"],\"CollPropertyBoolean\":[true,false,true],"
        + "\"CollPropertyByte\":[50,200,249],\"CollPropertySByte\":[-120,120,126],\"CollPropertyInt16\":"
        + "[1000,2000,30112],\"CollPropertyInt32\":[23232323,11223355,10000001],\"CollPropertyInt64\":"
        + "[929292929292,333333333333,444444444444],\"CollPropertySingle\":[1790.0,26600.0,3210.0],"
        + "\"CollPropertyDouble\":[-17900.0,-2.78E7,3210.0],\"CollPropertyDecimal\":"
        + "[12,-2,1234],\"CollPropertyBinary\":"
        + "[\"q83v\",\"ASNF\",\"VGeJ\"],\"CollPropertyDate\":[\"1958-12-03\",\"1999-08-05\",\"2013-06-25\"],"
        + "\"CollPropertyDateTimeOffset\":[\"2015-08-12T03:08:34Z\",\"1970-03-28T12:11:10Z\","
        + "\"1948-02-17T09:09:09Z\"],"
        + "\"CollPropertyDuration\":[\"PT13S\",\"PT5H28M0S\",\"PT1H0S\"],\"CollPropertyGuid\":"
        + "[\"ffffff67-89ab-cdef-0123-456789aaaaaa\",\"eeeeee67-89ab-cdef-0123-456789bbbbbb\","
        + "\"cccccc67-89ab-cdef-0123-456789cccccc\"],\"CollPropertyTimeOfDay\":[\"04:14:13\",\"23:59:59\","
        + "\"01:12:33\"]},{\"@odata.context\":\"#ESCollAllPrim/$link\",\"source\":\"ESDelta(100)\",\"relationship\":"
        + "\"NavPropertyETAllPrimOne\",\"target\":\"ESAllPrim(0)\"}]"
        + "}";
    Assert.assertNotNull(jsonString);
    Assert.assertEquals(expectedResult, jsonString);
  }
/**
 * Serializes a delta payload for ESKeyNav — an entity carrying complex and
 * collection-of-complex properties — together with one added link, and
 * verifies the exact JSON output: the "$delta" context URL, the complete
 * entity, and the trailing "$link" object for the added link.
 */
@Test
public void testComplexCollPropertyInDelta() throws Exception {
    final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESKeyNav");
    final Entity entity = data.readAll(edmEntitySet).getEntities().get(0);
    Delta delta = new Delta();
    List<DeltaLink> addedLinks = new ArrayList<DeltaLink>();
    List<Entity> addedEntities = new ArrayList<Entity>();
    // A fully populated link: relationship, source and target are all set
    // (the negativeLinkDeltaTest* cases below cover the missing-field errors).
    DeltaLink link1 = new DeltaLink();
    link1.setRelationship("NavPropertyETAllPrimOne");
    link1.setSource(new URI("ESDelta(100)"));
    link1.setTarget(new URI("ESAllPrim(0)"));
    addedLinks.add(link1 );
    addedEntities.add(entity);
    delta.getAddedLinks().addAll(addedLinks );
    delta.getEntities().addAll(addedEntities);
    InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta ,
        EntityCollectionSerializerOptions.with()
            .contextURL(ContextURL.with().entitySet(edmEntitySet).build())
            .build()).getContent();
    String jsonString = IOUtils.toString(stream);
    // Expected payload: the complete first ESKeyNav entity followed by the added link.
    final String expectedResult = "{"
        +"\"@odata.context\":\"$metadata#ESKeyNav/$delta\","
        + "\"value\":[{\"@odata.id\":\"ESKeyNav(1)\",\"PropertyInt16\":1,"
        + "\"PropertyString\":\"I am String Property 1\",\"PropertyCompNav\":"
        + "{\"PropertyInt16\":1},\"PropertyCompAllPrim\":{\"PropertyString\":"
        + "\"First Resource - positive values\",\"PropertyBinary\":"
        + "\"ASNFZ4mrze8=\",\"PropertyBoolean\":true,\"PropertyByte\":"
        + "255,\"PropertyDate\":\"2012-12-03\",\"PropertyDateTimeOffset\":"
        + "\"2012-12-03T07:16:23Z\",\"PropertyDecimal\":"
        + "34,\"PropertySingle\":1.79E20,\"PropertyDouble\":-1.79E20,\"PropertyDuration\":\"PT6S\",\"PropertyGuid\":"
        + "\"01234567-89ab-cdef-0123-456789abcdef\",\"PropertyInt16\":32767,"
        + "\"PropertyInt32\":2147483647,\"PropertyInt64\":"
        + "9223372036854775807,\"PropertySByte\":127,\"PropertyTimeOfDay\":\"21:05:59\"},\"PropertyCompTwoPrim\":"
        + "{\"PropertyInt16\":16,\"PropertyString\":\"Test123\"},\"CollPropertyString\":"
        + "[\"Employee1@company.example\","
        + "\"Employee2@company.example\",\"Employee3@company.example\"],\"CollPropertyInt16\":[1000,2000,30112],"
        + "\"CollPropertyComp\":[{\"PropertyInt16\":1,\"PropertyComp\":{\"PropertyString\":"
        + "\"First Resource - positive values\","
        + "\"PropertyBinary\":\"ASNFZ4mrze8=\",\"PropertyBoolean\":true,\"PropertyByte\":"
        + "255,\"PropertyDate\":\"2012-12-03\","
        + "\"PropertyDateTimeOffset\":\"2012-12-03T07:16:23Z\",\"PropertyDecimal\":34,\"PropertySingle\":1.79E20,"
        + "\"PropertyDouble\":-1.79E20,\"PropertyDuration\":\"PT6S\",\"PropertyGuid\":"
        + "\"01234567-89ab-cdef-0123-456789abcdef\","
        + "\"PropertyInt16\":32767,\"PropertyInt32\":2147483647,\"PropertyInt64\":"
        + "9223372036854775807,\"PropertySByte\":127,"
        + "\"PropertyTimeOfDay\":\"21:05:59\"}},{\"PropertyInt16\":2,\"PropertyComp\":{\"PropertyString\":"
        + "\"First Resource - positive values\",\"PropertyBinary\":\"ASNFZ4mrze8=\",\"PropertyBoolean\":true,"
        + "\"PropertyByte\":255,\"PropertyDate\":\"2012-12-03\",\"PropertyDateTimeOffset\":\"2012-12-03T07:16:23Z\","
        + "\"PropertyDecimal\":34,\"PropertySingle\":1.79E20,\"PropertyDouble\":-1.79E20,"
        + "\"PropertyDuration\":\"PT6S\","
        + "\"PropertyGuid\":\"01234567-89ab-cdef-0123-456789abcdef\","
        + "\"PropertyInt16\":32767,\"PropertyInt32\":2147483647,"
        + "\"PropertyInt64\":9223372036854775807,\"PropertySByte\":127,"
        + "\"PropertyTimeOfDay\":\"21:05:59\"}},{\"PropertyInt16\":3,"
        + "\"PropertyComp\":{\"PropertyString\":\"First Resource - positive values\","
        + "\"PropertyBinary\":\"ASNFZ4mrze8=\","
        + "\"PropertyBoolean\":true,\"PropertyByte\":255,\"PropertyDate\":\"2012-12-03\",\"PropertyDateTimeOffset\":"
        + "\"2012-12-03T07:16:23Z\",\"PropertyDecimal\":34,\"PropertySingle\":1.79E20,\"PropertyDouble\":-1.79E20,"
        + "\"PropertyDuration\":\"PT6S\",\"PropertyGuid\":"
        + "\"01234567-89ab-cdef-0123-456789abcdef\",\"PropertyInt16\":32767,"
        + "\"PropertyInt32\":2147483647,\"PropertyInt64\":9223372036854775807,"
        + "\"PropertySByte\":127,\"PropertyTimeOfDay\":"
        + "\"21:05:59\"}}],\"PropertyCompCompNav\":{\"PropertyString\":\"1\","
        + "\"PropertyCompNav\":{\"PropertyInt16\":1}}},"
        + "{\"@odata.context\":\"#ESKeyNav/$link\",\"source\":\"ESDelta(100)\","
        + "\"relationship\":\"NavPropertyETAllPrimOne\","
        + "\"target\":\"ESAllPrim(0)\"}]"
        + "}";
    Assert.assertNotNull(jsonString);
    Assert.assertEquals(expectedResult, jsonString);
}
/**
 * Verifies that entities added to a delta payload are serialized with their
 * id and properties, and that an (unexpanded) expand option on the
 * navigation property does not alter the plain delta output.
 *
 * Cleanup vs. the previous version: the unused local {@code changedEntity}
 * (built but never added to the payload) was removed, and the sample data is
 * read once instead of twice.
 */
@Test
public void navigationEntityInDeltaEntity() throws Exception {
    final EdmEntitySet edmEntitySet = entityContainer.getEntitySet("ESDelta");
    // Read the sample data once and pick the two entities under test.
    final List<Entity> allEntities = data.readAll(edmEntitySet).getEntities();
    final ExpandOption expand = ExpandSelectMock.mockExpandOption(Collections.singletonList(
        ExpandSelectMock.mockExpandItem(edmEntitySet, "NavPropertyETAllPrimOne")));
    Delta delta = new Delta();
    delta.getEntities().add(allEntities.get(0));
    delta.getEntities().add(allEntities.get(3));
    InputStream stream = ser.entityCollection(metadata, edmEntitySet.getEntityType(), delta,
        EntityCollectionSerializerOptions.with()
            .contextURL(ContextURL.with().entitySet(edmEntitySet).build()).expand(expand)
            .build()).getContent();
    String jsonString = IOUtils.toString(stream);
    final String expectedResult = "{"
        + "\"@odata.context\":\"$metadata#ESDelta/$delta\",\"value\":"
        + "[{\"@odata.id\":\"ESDelta(32767)\",\"PropertyInt16\":32767,\"PropertyString\":"
        + "\"Number:32767\"},{\"@odata.id\":\"ESDelta(100)\",\"PropertyInt16\":100,"
        + "\"PropertyString\":\"Number:100\"}]"
        + "}";
    Assert.assertNotNull(jsonString);
    Assert.assertEquals(expectedResult, jsonString);
}
/** A delta entity carrying only a property — no id — must make serialization fail. */
@Test(expected = SerializerException.class)
public void negativeDeltaEntityTest() throws Exception {
    final EdmEntitySet entitySet = entityContainer.getEntitySet("ESDelta");
    final Entity source = data.readAll(entitySet).getEntities().get(1);
    // Copy a single property, deliberately omitting the entity id.
    final Entity withoutId = new Entity();
    withoutId.addProperty(source.getProperty("PropertyString"));
    final Delta payload = new Delta();
    payload.getEntities().add(withoutId);
    ser.entityCollection(metadata, entitySet.getEntityType(), payload,
        EntityCollectionSerializerOptions.with()
            .contextURL(ContextURL.with().entitySet(entitySet).build())
            .build()).getContent();
}
/** An added link without a relationship name must be rejected. */
@Test(expected = SerializerException.class)
public void negativeLinkDeltaTest1() throws Exception {
    final EdmEntitySet entitySet = entityContainer.getEntitySet("ESDelta");
    final DeltaLink incompleteLink = new DeltaLink();
    incompleteLink.setSource(new URI("ESDelta(100)"));
    incompleteLink.setTarget(new URI("ESAllPrim(0)"));
    final Delta payload = new Delta();
    payload.getAddedLinks().add(incompleteLink);
    ser.entityCollection(metadata, entitySet.getEntityType(), payload,
        EntityCollectionSerializerOptions.with()
            .contextURL(ContextURL.with().entitySet(entitySet).build())
            .build()).getContent();
}
/** An added link without a source URI must be rejected. */
@Test(expected = SerializerException.class)
public void negativeLinkDeltaTest2() throws Exception {
    final EdmEntitySet entitySet = entityContainer.getEntitySet("ESDelta");
    final DeltaLink incompleteLink = new DeltaLink();
    incompleteLink.setRelationship("NavPropertyETAllPrimOne");
    incompleteLink.setTarget(new URI("ESAllPrim(0)"));
    final Delta payload = new Delta();
    payload.getAddedLinks().add(incompleteLink);
    ser.entityCollection(metadata, entitySet.getEntityType(), payload,
        EntityCollectionSerializerOptions.with()
            .contextURL(ContextURL.with().entitySet(entitySet).build())
            .build()).getContent();
}
/** An added link without a target URI must be rejected. */
@Test(expected = SerializerException.class)
public void negativeLinkDeltaTest3() throws Exception {
    final EdmEntitySet entitySet = entityContainer.getEntitySet("ESDelta");
    final DeltaLink incompleteLink = new DeltaLink();
    incompleteLink.setRelationship("NavPropertyETAllPrimOne");
    incompleteLink.setSource(new URI("ESDelta(100)"));
    final Delta payload = new Delta();
    payload.getAddedLinks().add(incompleteLink);
    ser.entityCollection(metadata, entitySet.getEntityType(), payload,
        EntityCollectionSerializerOptions.with()
            .contextURL(ContextURL.with().entitySet(entitySet).build())
            .build()).getContent();
}
/** A null entry in the added-links list must be rejected. */
@Test(expected = SerializerException.class)
public void negativeLinkDeltaTest4() throws Exception {
    final EdmEntitySet entitySet = entityContainer.getEntitySet("ESDelta");
    final Delta payload = new Delta();
    // Same effect as adding a list containing a single null link.
    payload.getAddedLinks().add(null);
    ser.entityCollection(metadata, entitySet.getEntityType(), payload,
        EntityCollectionSerializerOptions.with()
            .contextURL(ContextURL.with().entitySet(entitySet).build())
            .build()).getContent();
}
/** A deleted entity without an id must be rejected by the serializer. */
@Test(expected = SerializerException.class)
public void negativeDeltaDeletedEntityTest1() throws Exception {
    final EdmEntitySet entitySet = entityContainer.getEntitySet("ESDelta");
    final DeletedEntity withoutId = new DeletedEntity();
    withoutId.setReason(Reason.deleted);
    final Delta payload = new Delta();
    payload.getDeletedEntities().add(withoutId);
    ser.entityCollection(metadata, entitySet.getEntityType(), payload,
        EntityCollectionSerializerOptions.with()
            .contextURL(ContextURL.with().entitySet(entitySet).build())
            .build()).getContent();
}
/** A deleted entity without a reason must be rejected by the serializer. */
@Test(expected = SerializerException.class)
public void negativeDeltaDeletedEntityTest2() throws Exception {
    final EdmEntitySet entitySet = entityContainer.getEntitySet("ESDelta");
    final DeletedEntity withoutReason = new DeletedEntity();
    withoutReason.setId(new URI("ESDelta(100)"));
    final Delta payload = new Delta();
    payload.getDeletedEntities().add(withoutReason);
    ser.entityCollection(metadata, entitySet.getEntityType(), payload,
        EntityCollectionSerializerOptions.with()
            .contextURL(ContextURL.with().entitySet(entitySet).build())
            .build()).getContent();
}
}
|
apache/poi | 38,269 | poi-scratchpad/src/test/java/org/apache/poi/hslf/usermodel/TestBugs.java | /* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hslf.usermodel;
import static org.apache.poi.POITestCase.assertContains;
import static org.apache.poi.POITestCase.assertStartsWith;
import static org.apache.poi.hslf.HSLFTestDataSamples.writeOutAndReadBack;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.awt.Color;
import java.awt.geom.Ellipse2D;
import java.awt.geom.Path2D;
import java.awt.geom.Rectangle2D;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.apache.poi.POIDataSamples;
import org.apache.poi.common.usermodel.fonts.FontGroup;
import org.apache.poi.ddf.AbstractEscherOptRecord;
import org.apache.poi.ddf.EscherArrayProperty;
import org.apache.poi.ddf.EscherColorRef;
import org.apache.poi.ddf.EscherPropertyTypes;
import org.apache.poi.hslf.HSLFTestDataSamples;
import org.apache.poi.hslf.exceptions.OldPowerPointFormatException;
import org.apache.poi.hslf.model.HeadersFooters;
import org.apache.poi.hslf.record.DocInfoListContainer;
import org.apache.poi.hslf.record.Document;
import org.apache.poi.hslf.record.RecordTypes;
import org.apache.poi.hslf.record.SlideListWithText;
import org.apache.poi.hslf.record.SlideListWithText.SlideAtomsSet;
import org.apache.poi.hslf.record.TextHeaderAtom;
import org.apache.poi.hslf.record.VBAInfoAtom;
import org.apache.poi.hslf.record.VBAInfoContainer;
import org.apache.poi.poifs.filesystem.POIFSFileSystem;
import org.apache.poi.poifs.macros.VBAMacroReader;
import org.apache.poi.sl.draw.DrawPaint;
import org.apache.poi.sl.extractor.SlideShowExtractor;
import org.apache.poi.sl.usermodel.*;
import org.apache.poi.sl.usermodel.PaintStyle.SolidPaint;
import org.apache.poi.sl.usermodel.PictureData.PictureType;
import org.apache.poi.sl.usermodel.TextParagraph.TextAlign;
import org.apache.poi.util.LittleEndian;
import org.apache.poi.util.StringUtil;
import org.apache.poi.util.Units;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.junit.jupiter.params.provider.ValueSource;
/**
 * Test cases for bugs entered in Bugzilla.
 * Each test method name contains the Bugzilla bug id.
 */
public final class TestBugs {
/**
 * Bug 41384: Array index wrong in record creation.
 * The sample file must expose one slide and exactly two JPEG pictures.
 */
@Test
void bug41384() throws IOException {
    try (HSLFSlideShow ppt = open("41384.ppt")) {
        assertEquals(1, ppt.getSlides().size());
        List<HSLFPictureData> pictures = ppt.getPictureData();
        assertEquals(2, pictures.size());
        for (HSLFPictureData picture : pictures) {
            assertEquals(PictureType.JPEG, picture.getType());
        }
    }
}
/** Bug 65653: fill types must map to the matching PaintStyle implementations. */
@Test
void fillTypesPaintMapping_65653() throws IOException {
    // A texture fill must surface as a TexturePaint.
    try (HSLFSlideShow ppt = open("41246-2.ppt")) {
        HSLFAutoShape shape = (HSLFAutoShape) ppt.getSlides().get(15).getShapes().get(0);
        HSLFFill fill = shape.getFill();
        assertEquals(HSLFFill.FILL_TEXTURE, fill.getFillType());
        assertTrue(fill.getFillStyle().getPaint() instanceof PaintStyle.TexturePaint);
    }
    // A background fill resolves to the sheet background — solid white here.
    try (HSLFSlideShow ppt = open("backgrounds.ppt")) {
        HSLFAutoShape shape = (HSLFAutoShape) ppt.getSlides().get(1).getShapes().get(0);
        assertEquals(HSLFFill.FILL_BACKGROUND, shape.getFill().getFillType());
        PaintStyle paint = shape.getFillStyle().getPaint();
        assertTrue(paint instanceof SolidPaint);
        assertEquals(Color.WHITE, ((SolidPaint) paint).getSolidColor().getColor());
    }
}
/**
 * First fix from Bug 42474: NPE in RichTextRun.isBold()
 * when the RichTextRun comes from a Notes model object.
 */
@Test
void bug42474_1() throws IOException {
    try (HSLFSlideShow ppt = open("42474-1.ppt")) {
        assertEquals(2, ppt.getSlides().size());
        // Slide 1: plain (non-bold) notes.
        HSLFNotes firstNotes = ppt.getSlides().get(0).getNotes();
        assertNotNull(firstNotes);
        List<HSLFTextParagraph> firstParas = firstNotes.getTextParagraphs().get(0);
        assertEquals("Notes-1", HSLFTextParagraph.getRawText(firstParas));
        assertFalse(firstParas.get(0).getTextRuns().get(0).isBold());
        // Slide 2: notes rendered in bold.
        HSLFNotes secondNotes = ppt.getSlides().get(1).getNotes();
        assertNotNull(secondNotes);
        List<HSLFTextParagraph> secondParas = secondNotes.getTextParagraphs().get(0);
        assertEquals("Notes-2", HSLFTextParagraph.getRawText(secondParas));
        assertTrue(secondParas.get(0).getTextRuns().get(0).isBold());
    }
}
/**
 * Second fix from Bug 42474: Incorrect matching of notes to slides.
 */
@Test
void bug42474_2() throws IOException {
    try (HSLFSlideShow ppt = open("42474-2.ppt")) {
        // Map slide number to the starting phrase of its notes.
        Map<Integer, String> notesMap = new HashMap<>();
        notesMap.put(4, "For decades before calculators");
        notesMap.put(5, "Several commercial applications");
        notesMap.put(6, "There are three variations of LNS that are discussed here");
        notesMap.put(7, "Although multiply and square root are easier");
        notesMap.put(8, "The bus Z is split into Z_H and Z_L");
        for (HSLFSlide slide : ppt.getSlides()) {
            String startingPhrase = notesMap.get(slide.getSlideNumber());
            if (startingPhrase == null) {
                continue;
            }
            HSLFNotes notes = slide.getNotes();
            assertNotNull(notes);
            String text = HSLFTextParagraph.getRawText(notes.getTextParagraphs().get(0));
            assertStartsWith("Notes for slide " + slide.getSlideNumber() + " must start with starting phrase",
                text, startingPhrase);
        }
    }
}
/**
 * Bug 42485: All TextBoxes inside ShapeGroups have null TextRuns.
 */
@Test
void bug42485() throws IOException {
    try (HSLFSlideShow ppt = open("42485.ppt")) {
        for (HSLFShape shape : ppt.getSlides().get(0).getShapes()) {
            if (!(shape instanceof HSLFGroupShape)) {
                continue;
            }
            // Every text box inside a group must expose its paragraphs.
            for (HSLFShape member : ((HSLFGroupShape) shape).getShapes()) {
                if (member instanceof HSLFTextBox) {
                    assertNotNull(((HSLFTextBox) member).getTextParagraphs());
                }
            }
        }
    }
}
/**
 * Bug 42484: NullPointerException from ShapeGroup.getAnchor().
 */
@Test
void bug42484() throws IOException {
    try (HSLFSlideShow ppt = open("42485.ppt")) {
        for (HSLFShape shape : ppt.getSlides().get(0).getShapes()) {
            if (!(shape instanceof HSLFGroupShape)) {
                continue;
            }
            HSLFGroupShape group = (HSLFGroupShape) shape;
            // Both the group and all of its members must report an anchor.
            assertNotNull(group.getAnchor());
            for (HSLFShape member : group.getShapes()) {
                assertNotNull(member.getAnchor());
            }
        }
    }
}
/**
 * Bug 41381: Exception from Slide.getMasterSheet() on a seemingly valid PPT file.
 */
@Test
void bug41381() throws IOException {
    try (HSLFSlideShow ppt = open("alterman_security.ppt")) {
        assertEquals(1, ppt.getSlideMasters().size());
        assertEquals(1, ppt.getTitleMasters().size());
        List<HSLFSlide> slides = ppt.getSlides();
        for (int idx = 0; idx < slides.size(); idx++) {
            HSLFMasterSheet master = slides.get(idx).getMasterSheet();
            // Only the first slide follows the TitleMaster.
            if (idx == 0) {
                assertTrue(master instanceof HSLFTitleMaster);
            } else {
                assertTrue(master instanceof HSLFSlideMaster);
            }
        }
    }
}
/**
 * Bug 42524: NPE in Shape.getShapeType().
 */
@Test
void bug42524() throws IOException {
    try (HSLFSlideShow ppt = open("42486.ppt")) {
        // Walk the shape tree; every shape (incl. grouped ones) must report a name.
        for (HSLFSlide slide : ppt.getSlides()) {
            for (HSLFShape shape : slide.getShapes()) {
                assertNotNull(shape.getShapeName());
                if (shape instanceof HSLFGroupShape) {
                    for (HSLFShape member : ((HSLFGroupShape) shape).getShapes()) {
                        assertNotNull(member.getShapeName());
                    }
                }
            }
        }
    }
}
/**
 * Bug 42520: NPE in Picture.getPictureData().
 * First reproduces the exact access pattern from the bug report, then walks
 * the whole shape tree to make sure no picture access fails.
 */
@SuppressWarnings("unused")
@Test
void bug42520() throws IOException {
    try (HSLFSlideShow ppt = open("42520.ppt")) {
        //test case from the bug report
        HSLFGroupShape shapeGroup = (HSLFGroupShape) ppt.getSlides().get(11).getShapes().get(10);
        HSLFPictureShape picture = (HSLFPictureShape) shapeGroup.getShapes().get(0);
        // The call itself is the test — it used to throw an NPE.
        picture.getPictureData();
        boolean found = false;
        //walk down the tree and see if there were no errors while reading
        for (HSLFSlide slide : ppt.getSlides()) {
            for (HSLFShape shape : slide.getShapes()) {
                if (shape instanceof HSLFGroupShape) {
                    HSLFGroupShape group = (HSLFGroupShape) shape;
                    for (HSLFShape comp : group.getShapes()) {
                        if (comp instanceof HSLFPictureShape) {
                            // 'pict' is intentionally unused (hence @SuppressWarnings);
                            // only the absence of an exception matters here.
                            HSLFPictureData pict = ((HSLFPictureShape) comp).getPictureData();
                            assertEquals("Rectangle 35893", comp.getShapeName());
                            found = true;
                        }
                    }
                }
            }
        }
        // At least one grouped picture must have been visited.
        assertTrue(found);
    }
}
/**
 * Bug 38256: RuntimeException: Couldn't instantiate the class for type with id 0.
 * (also fixed follow-up: getTextRuns() returned no text)
 */
@Test
void bug38256() throws IOException {
    try (HSLFSlideShow ppt = open("38256.ppt")) {
        List<HSLFSlide> slides = ppt.getSlides();
        assertEquals(1, slides.size());
        List<List<HSLFTextParagraph>> paragraphs = slides.get(0).getTextParagraphs();
        assertEquals(4, paragraphs.size());
        // Every paragraph's raw text must be one of the four known strings.
        Set<String> expected = new HashSet<>();
        expected.add("\u201CHAPPY BIRTHDAY SCOTT\u201D");
        expected.add("Have a HAPPY DAY");
        expected.add("PS Nobody is allowed to hassle Scott TODAY\u2026");
        expected.add("Drinks will be in the Boardroom at 5pm today to celebrate Scott\u2019s B\u2019Day\u2026 See you all there!");
        for (List<HSLFTextParagraph> paragraph : paragraphs) {
            String actual = HSLFTextParagraph.getRawText(paragraph);
            assertTrue(expected.contains(actual), actual);
        }
    }
}
/**
 * Bug 43781: RuntimeException: Couldn't instantiate the class for type with id 0.
 * (also fixed follow-up: getTextRuns() returned no text)
 */
@Test
void bug43781() throws IOException {
    try (HSLFSlideShow ppt = open("43781.ppt")) {
        HSLFSlide firstSlide = ppt.getSlides().get(0);
        List<List<HSLFTextParagraph>> slideText = firstSlide.getTextParagraphs();
        // Three paragraph lists: empty title, filled body, one drawing-based text.
        assertEquals(3, slideText.size());
        assertFalse(slideText.get(0).get(0).isDrawingBased());
        assertFalse(slideText.get(1).get(0).isDrawingBased());
        assertTrue(slideText.get(2).get(0).isDrawingBased());
        assertEquals("", HSLFTextParagraph.getRawText(slideText.get(0)));
        assertEquals("First run", HSLFTextParagraph.getRawText(slideText.get(1)));
        assertEquals("Second run", HSLFTextParagraph.getRawText(slideText.get(2)));
        // Collect the shape-based text runs.
        List<HSLFTextParagraph> shapeParagraphs = new ArrayList<>();
        for (HSLFShape shape : firstSlide.getShapes()) {
            if (shape instanceof HSLFTextShape) {
                shapeParagraphs.addAll(((HSLFTextShape) shape).getTextParagraphs());
            }
        }
        // There are two text shapes in the ppt.
        assertEquals(2, shapeParagraphs.size());
        assertEquals("First runSecond run", HSLFTextParagraph.getRawText(shapeParagraphs));
    }
}
/**
 * Bug 44296: HSLF Not Extracting Slide Background Image.
 */
@Test
void bug44296() throws IOException {
    try (HSLFSlideShow ppt = open("44296.ppt")) {
        HSLFBackground background = ppt.getSlides().get(0).getBackground();
        assertNotNull(background);
        HSLFFill fill = background.getFill();
        assertEquals(HSLFFill.FILL_PICTURE, fill.getFillType());
        // The picture behind the background fill must be retrievable.
        HSLFPictureData picture = fill.getPictureData();
        assertNotNull(picture);
        assertEquals(PictureType.JPEG, picture.getType());
    }
}
/**
 * Bug 41071: Will not extract text from Powerpoint TextBoxes.
 */
@Test
void bug41071() throws IOException {
    try (HSLFSlideShow ppt = open("41071.ppt")) {
        HSLFSlide slide = ppt.getSlides().get(0);
        List<HSLFShape> shapes = slide.getShapes();
        assertEquals(1, shapes.size());
        HSLFShape onlyShape = shapes.get(0);
        assertTrue(onlyShape instanceof HSLFTextShape);
        assertEquals("Fundera, planera och involvera.",
            HSLFTextParagraph.getRawText(((HSLFTextShape) onlyShape).getTextParagraphs()));
        // The slide-level view exposes the same text as its third paragraph list.
        List<List<HSLFTextParagraph>> slideText = slide.getTextParagraphs();
        assertEquals(3, slideText.size());
        assertEquals("Fundera, planera och involvera.", HSLFTextParagraph.getRawText(slideText.get(2)));
    }
}
/**
 * PowerPoint 95 files should throw a more helpful exception.
 */
@Test
void bug41711() throws IOException {
    // A current-format file opens without complaint.
    open("SampleShow.ppt").close();
    // A PowerPoint 95 file must fail with the dedicated old-format exception.
    assertThrows(OldPowerPointFormatException.class, () -> open("PPT95.ppt").close());
}
/**
 * Changing text from Ascii to Unicode.
 */
@Test
void bug49648() throws IOException {
    // Constant replacement text — hoisted out of the loops.
    final String repl = "With \u0123\u1234\u5678 unicode";
    try (HSLFSlideShow ppt = open("49648.ppt")) {
        for (HSLFSlide slide : ppt.getSlides()) {
            for (List<HSLFTextParagraph> paragraphs : slide.getTextParagraphs()) {
                String replaced = HSLFTextParagraph.getRawText(paragraphs).replace("{txtTot}", repl);
                HSLFTextParagraph.setText(paragraphs, replaced);
                if (replaced.contains(repl)) {
                    assertTrue(HSLFTextParagraph.getText(paragraphs).contains(repl));
                }
            }
        }
    }
}
/**
 * Bug 45776: Fix corrupt file problem using TextRun.setText.
 * Replaces a date placeholder and checks that the paragraph/character style
 * records grow to cover the new text.
 */
@Test
void bug45776() throws IOException {
    DateFormat df = new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy", Locale.ROOT);
    try (HSLFSlideShow ppt = open("45776.ppt")) {
        // get slides
        for (HSLFSlide slide : ppt.getSlides()) {
            for (HSLFShape shape : slide.getShapes()) {
                if (!(shape instanceof HSLFTextBox)) {
                    continue;
                }
                HSLFTextBox tb = (HSLFTextBox) shape;
                // work with TextBox
                String str = tb.getText();
                if (!str.contains("$$DATE$$")) {
                    continue;
                }
                str = str.replace("$$DATE$$", df.format(new Date()));
                tb.setText(str);
                List<HSLFTextParagraph> tr = tb.getTextParagraphs();
                // The +1 presumably accounts for a trailing terminator character
                // in the style records — TODO confirm against the HSLF text model.
                assertEquals(str.length() + 1, tr.get(0).getParagraphStyle().getCharactersCovered());
                assertEquals(str.length() + 1, tr.get(0).getTextRuns().get(0).getCharacterStyle().getCharactersCovered());
            }
        }
    }
}
/** Bug 55732: reading headers/footers of any slide must not throw. */
@Test
void bug55732() throws IOException {
    try (HSLFSlideShow ppt = open("bug55732.ppt")) {
        for (HSLFSlide slide : ppt.getSlides()) {
            HeadersFooters headersFooters = slide.getHeadersFooters();
            assertDoesNotThrow(headersFooters::isHeaderVisible);
        }
    }
}
/**
 * Bug 56260: verifies the record layout of the first slide — exactly two
 * TextHeaderAtoms, sitting next to each other, and two non-drawing-based
 * text paragraph lists on the slide.
 */
@Test
void bug56260() throws IOException {
    try (HSLFSlideShow ppt = open("56260.ppt")) {
        List<HSLFSlide> _slides = ppt.getSlides();
        assertEquals(13, _slides.size());
        // Check the number of TextHeaderAtoms on Slide 1
        Document dr = ppt.getDocumentRecord();
        SlideListWithText slidesSLWT = dr.getSlideSlideListWithText();
        assertNotNull(slidesSLWT);
        SlideAtomsSet s1 = slidesSLWT.getSlideAtomsSets()[0];
        int tha = 0;
        for (org.apache.poi.hslf.record.Record r : s1.getSlideRecords()) {
            if (r instanceof TextHeaderAtom) {
                tha++;
            }
        }
        assertEquals(2, tha);
        // Check to see that we have a pair next to each other
        assertEquals(TextHeaderAtom.class, s1.getSlideRecords()[0].getClass());
        assertEquals(TextHeaderAtom.class, s1.getSlideRecords()[1].getClass());
        // Check the number of text runs based on the slide (not textbox)
        // Will have skipped the empty one
        int str = 0;
        for (List<HSLFTextParagraph> tr : _slides.get(0).getTextParagraphs()) {
            if (!tr.get(0).isDrawingBased()) {
                str++;
            }
        }
        assertEquals(2, str);
    }
}
/** Bug 49541: symbol-font codepoints must be mapped back to regular unicode. */
@Test
void bug49541() throws IOException {
    try (HSLFSlideShow ppt = open("49541_symbol_map.ppt")) {
        HSLFGroupShape group = (HSLFGroupShape) ppt.getSlides().get(0).getShapes().get(0);
        HSLFTextBox textBox = (HSLFTextBox) group.getShapes().get(0);
        String mapped = StringUtil.mapMsCodepointString(textBox.getText());
        assertEquals("\u226575 years", mapped);
    }
}
/**
 * Opens each once-problematic sample file, checks every slide has shapes,
 * then removes a slide, adds a fresh one, round-trips the document and
 * verifies the expected slide count survives.
 * Fix: removed the stray empty statement (";") that followed the inner
 * try-with-resources block.
 */
@ParameterizedTest
@CsvSource({
    // bug47261.ppt has actually 16 slides, but also non-conforming multiple document records
    "bug47261.ppt, 1",
    "bug56240.ppt, 105",
    "bug58516.ppt, 5",
    "57272_corrupted_usereditatom.ppt, 6",
    "37625.ppt, 29",
    // Bug 41246: AIOOB with illegal note references
    "41246-1.ppt, 36",
    "41246-2.ppt, 16",
    // Bug 44770: java.lang.RuntimeException: Couldn't instantiate the class for
    // type with id 1036 on class class org.apache.poi.hslf.record.PPDrawing
    "44770.ppt, 19",
    // Bug 42486: Failure parsing a seemingly valid PPT
    "42486.ppt, 33"
})
void testFile(String file, int slideCnt) throws IOException {
    try (HSLFSlideShow ppt = open(file)) {
        // Every slide of the sample file must expose at least one shape.
        for (HSLFSlide slide : ppt.getSlides()) {
            assertFalse(slide.getShapes().isEmpty());
        }
        assertNotNull(ppt.getSlides().get(0));
        // Remove/add a slide, then round-trip and check the slide count.
        ppt.removeSlide(0);
        ppt.createSlide();
        try (HSLFSlideShow ppt2 = writeOutAndReadBack(ppt)) {
            assertEquals(slideCnt, ppt2.getSlides().size());
        }
    }
}
/**
 * Bug 46441: verifies the gradient shade colors of the first shape.
 * Each entry of the FILL__SHADECOLORS array property holds a 4-byte color
 * reference followed by a fixed-point gradient position (decoded via
 * Units.fixedPointToDouble).
 */
@Test
void bug46441() throws IOException {
    try (HSLFSlideShow ppt = open("bug46441.ppt")) {
        HSLFAutoShape as = (HSLFAutoShape) ppt.getSlides().get(0).getShapes().get(0);
        AbstractEscherOptRecord opt = as.getEscherOptRecord();
        EscherArrayProperty ep = HSLFShape.getEscherProperty(opt, EscherPropertyTypes.FILL__SHADECOLORS);
        // Expected values for the four gradient stops in the sample file.
        double[][] exp = {
            // r, g, b, position
            {94, 158, 255, 0},
            {133, 194, 255, 0.399994},
            {196, 214, 235, 0.699997},
            {255, 235, 250, 1}
        };
        int i = 0;
        for (byte[] data : ep) {
            // Color ref sits in the first 4 bytes, the position in the next 4.
            EscherColorRef ecr = new EscherColorRef(data, 0, 4);
            int[] rgb = ecr.getRGB();
            double pos = Units.fixedPointToDouble(LittleEndian.getInt(data, 4));
            assertEquals((int) exp[i][0], rgb[0]);
            assertEquals((int) exp[i][1], rgb[1]);
            assertEquals((int) exp[i][2], rgb[2]);
            assertEquals(exp[i][3], pos, 0.01);
            i++;
        }
    }
}
/**
 * Bug 45124: an inserted text box with explicit bold/italic and bullet style
 * must survive a write/read round trip.
 */
@Test
void bug45124() throws IOException {
    try (HSLFSlideShow ppt = open("bug45124.ppt")) {
        Slide<?, ?> slide1 = ppt.getSlides().get(1);
        TextBox<?, ?> box = slide1.createTextBox();
        box.setAnchor(new java.awt.Rectangle(60, 150, 700, 100));
        box.setText("I am italic-false, bold-true inserted text");
        TextParagraph<?, ?, ?> para = box.getTextParagraphs().get(0);
        TextRun run = para.getTextRuns().get(0);
        run.setItalic(false);
        assertTrue(run.isBold());
        para.setBulletStyle(Color.red, 'A');
        try (SlideShow<?, ?> ppt2 = writeOutAndReadBack(ppt)) {
            TextBox<?, ?> readBack = (TextBox<?, ?>) ppt2.getSlides().get(1).getShapes().get(1);
            TextParagraph<?, ?, ?> rbPara = readBack.getTextParagraphs().get(0);
            TextRun rbRun = rbPara.getTextRuns().get(0);
            assertFalse(rbRun.isItalic());
            assertTrue(rbRun.isBold());
            PaintStyle bulletPaint = rbPara.getBulletStyle().getBulletFontColor();
            assertTrue(bulletPaint instanceof SolidPaint);
            Color actColor = DrawPaint.applyColorTransform(((SolidPaint) bulletPaint).getSolidColor());
            assertEquals(Color.red, actColor);
            assertEquals("A", rbPara.getBulletStyle().getBulletCharacter());
        }
    }
}
/** Bug 45088: placeholder replacement in text runs must survive a round trip. */
@Test
void bug45088() throws IOException {
    final String template = "[SYSDATE]";
    final String textExp = "REPLACED_DATE_WITH_A_LONG_ONE";
    try (HSLFSlideShow ppt1 = open("bug45088.ppt")) {
        // Replace the placeholder in every run of every slide.
        for (HSLFSlide slide : ppt1.getSlides()) {
            for (List<HSLFTextParagraph> paraList : slide.getTextParagraphs()) {
                for (HSLFTextParagraph para : paraList) {
                    for (HSLFTextRun run : para.getTextRuns()) {
                        String text = run.getRawText();
                        if (text == null || !text.contains(template)) {
                            continue;
                        }
                        run.setText(text.replace(template, textExp));
                        para.setDirty();
                    }
                }
            }
        }
        try (HSLFSlideShow ppt2 = writeOutAndReadBack(ppt1)) {
            HSLFTextBox tb = (HSLFTextBox) ppt2.getSlides().get(0).getShapes().get(1);
            String textAct = tb.getTextParagraphs().get(0).getTextRuns().get(0).getRawText().trim();
            assertEquals(textExp, textAct);
        }
    }
}
/**
 * Bug 45908: builds a 5x2 table whose cells copy the text/paragraph style of
 * an existing styled shape, then verifies every copied attribute after a
 * write/read round trip.
 */
@Test
void bug45908() throws IOException {
    try (HSLFSlideShow ppt1 = open("bug45908.ppt")) {
        HSLFSlide slide = ppt1.getSlides().get(0);
        // Source of the style values to copy into each table cell.
        HSLFAutoShape styleShape = (HSLFAutoShape) slide.getShapes().get(1);
        HSLFTextParagraph tp0 = styleShape.getTextParagraphs().get(0);
        HSLFTextRun tr0 = tp0.getTextRuns().get(0);
        int rows = 5;
        int cols = 2;
        HSLFTable table = slide.createTable(rows, cols);
        for (int i = 0; i < rows; i++) {
            for (int j = 0; j < cols; j++) {
                HSLFTableCell cell = table.getCell(i, j);
                assertNotNull(cell);
                cell.setText("Test");
                HSLFTextParagraph tp = cell.getTextParagraphs().get(0);
                // Copy paragraph-level style, then clear the bullet again —
                // the attributes themselves must still round-trip.
                tp.setBulletStyle('%', tp0.getBulletColor(), tp0.getBulletFont(), tp0.getBulletSize());
                tp.setIndent(tp0.getIndent());
                tp.setTextAlign(tp0.getTextAlign());
                tp.setIndentLevel(tp0.getIndentLevel());
                tp.setSpaceAfter(tp0.getSpaceAfter());
                tp.setSpaceBefore(tp0.getSpaceBefore());
                tp.setBulletStyle();
                // Copy run-level (character) style.
                HSLFTextRun tr = tp.getTextRuns().get(0);
                tr.setBold(tr0.isBold());
                // rt.setEmbossed();
                tr.setFontColor(Color.BLACK);
                tr.setFontFamily(tr0.getFontFamily());
                tr.setFontSize(tr0.getFontSize());
                tr.setItalic(tr0.isItalic());
                tr.setShadowed(tr0.isShadowed());
                tr.setStrikethrough(tr0.isStrikethrough());
                tr.setUnderlined(tr0.isUnderlined());
            }
        }
        table.moveTo(100, 100);
        try (HSLFSlideShow ppt2 = writeOutAndReadBack(ppt1)) {
            // Verify the first cell of the round-tripped table.
            HSLFTable tab = (HSLFTable) ppt2.getSlides().get(0).getShapes().get(2);
            HSLFTableCell c2 = tab.getCell(0, 0);
            assertNotNull(c2);
            HSLFTextParagraph tp1 = c2.getTextParagraphs().get(0);
            HSLFTextRun tr1 = tp1.getTextRuns().get(0);
            assertFalse(tp1.isBullet());
            assertEquals(tp0.getBulletColor(), tp1.getBulletColor());
            assertEquals(tp0.getBulletFont(), tp1.getBulletFont());
            assertEquals(tp0.getBulletSize(), tp1.getBulletSize());
            assertEquals(tp0.getIndent(), tp1.getIndent());
            assertEquals(tp0.getTextAlign(), tp1.getTextAlign());
            assertEquals(tp0.getIndentLevel(), tp1.getIndentLevel());
            assertEquals(tp0.getSpaceAfter(), tp1.getSpaceAfter());
            assertEquals(tp0.getSpaceBefore(), tp1.getSpaceBefore());
            assertEquals(tr0.isBold(), tr1.isBold());
            assertNotNull(tr1.getFontColor());
            assertEquals(Color.black, DrawPaint.applyColorTransform(tr1.getFontColor().getSolidColor()));
            assertEquals(tr0.getFontFamily(), tr1.getFontFamily());
            assertEquals(tr0.getFontSize(), tr1.getFontSize());
            assertEquals(tr0.isItalic(), tr1.isItalic());
            assertEquals(tr0.isShadowed(), tr1.isShadowed());
            assertEquals(tr0.isStrikethrough(), tr1.isStrikethrough());
            assertEquals(tr0.isUnderlined(), tr1.isUnderlined());
        }
    }
}
@Test
void bug47904() throws IOException {
try (HSLFSlideShow ppt1 = new HSLFSlideShow()) {
HSLFSlideMaster sm = ppt1.getSlideMasters().get(0);
HSLFAutoShape as = (HSLFAutoShape) sm.getPlaceholder(Placeholder.TITLE);
HSLFTextParagraph tp = as.getTextParagraphs().get(0);
HSLFTextRun tr = tp.getTextRuns().get(0);
tr.setFontFamily("Tahoma");
tr.setShadowed(true);
tr.setFontSize(44.);
tr.setFontColor(Color.red);
tp.setTextAlign(TextAlign.RIGHT);
HSLFTextBox tb = ppt1.createSlide().addTitle();
tb.setText("foobaa");
try (HSLFSlideShow ppt2 = writeOutAndReadBack(ppt1)) {
HSLFTextShape ts = (HSLFTextShape) ppt2.getSlides().get(0).getShapes().get(0);
tp = ts.getTextParagraphs().get(0);
tr = tp.getTextRuns().get(0);
assertNotNull(tr);
assertNotNull(tr.getFontSize());
assertEquals(44., tr.getFontSize(), 0);
assertEquals("Tahoma", tr.getFontFamily());
assertNotNull(tr.getFontColor());
Color colorAct = DrawPaint.applyColorTransform(tr.getFontColor().getSolidColor());
assertEquals(Color.red, colorAct);
assertEquals(TextAlign.RIGHT, tp.getTextAlign());
assertEquals("foobaa", tr.getRawText());
}
}
}
@ParameterizedTest
@ValueSource(strings = {
"bug58718_008524.ppt", "bug58718_008558.ppt", "bug58718_349008.ppt", "bug58718_008495.ppt",
"bug58733_671884.ppt"
})
void bug58718(String file) throws IOException {
File sample = HSLFTestDataSamples.getSampleFile(file);
try (SlideShowExtractor<?,?> ex = new SlideShowExtractor<>(SlideShowFactory.create(sample))) {
assertNotNull(ex.getText());
}
}
@Test
void bug58159() throws IOException {
try (HSLFSlideShow ppt = open("bug58159_headers-and-footers.ppt")) {
HeadersFooters hf = ppt.getSlideHeadersFooters();
assertNull(hf.getHeaderText());
assertEquals("Slide footer", hf.getFooterText());
hf = ppt.getNotesHeadersFooters();
assertEquals("Notes header", hf.getHeaderText());
assertEquals("Notes footer", hf.getFooterText());
HSLFSlide sl = ppt.getSlides().get(0);
hf = sl.getHeadersFooters();
assertNull(hf.getHeaderText());
assertEquals("Slide footer", hf.getFooterText());
for (HSLFShape shape : sl.getShapes()) {
if (shape instanceof HSLFTextShape) {
HSLFTextShape ts = (HSLFTextShape) shape;
Placeholder ph = ts.getPlaceholder();
if (Placeholder.FOOTER == ph) {
assertEquals("Slide footer", ts.getText());
}
}
}
}
}
@Test
void bug55030() throws IOException {
try (HSLFSlideShow ppt = open("bug55030.ppt")) {
String expFamily = "\u96b6\u4e66";
HSLFSlide sl = ppt.getSlides().get(0);
for (List<HSLFTextParagraph> paraList : sl.getTextParagraphs()) {
for (HSLFTextParagraph htp : paraList) {
for (HSLFTextRun htr : htp) {
String actFamily = htr.getFontFamily(FontGroup.EAST_ASIAN);
assertEquals(expFamily, actFamily);
}
}
}
}
}
private static HSLFSlideShow open(String fileName) throws IOException {
File sample = HSLFTestDataSamples.getSampleFile(fileName);
// Note: don't change the code here, it is required for Eclipse to compile the code
SlideShow<?,?> slideShowOrig = SlideShowFactory.create(sample, null, false);
return (HSLFSlideShow)slideShowOrig;
}
@Test
void bug55983() throws IOException {
try (HSLFSlideShow ppt1 = new HSLFSlideShow()) {
HSLFSlide sl = ppt1.createSlide();
assertNotNull(sl.getBackground());
HSLFFill fill = sl.getBackground().getFill();
assertNotNull(fill);
fill.setForegroundColor(Color.blue);
HSLFFreeformShape fs = sl.createFreeform();
Ellipse2D.Double el = new Ellipse2D.Double(0, 0, 300, 200);
fs.setAnchor(new Rectangle2D.Double(100, 100, 300, 200));
fs.setPath(new Path2D.Double(el));
Color cExp = new Color(50, 100, 150, 200);
fs.setFillColor(cExp);
try (HSLFSlideShow ppt2 = writeOutAndReadBack(ppt1)) {
sl = ppt2.getSlides().get(0);
fs = (HSLFFreeformShape) sl.getShapes().get(0);
Color cAct = fs.getFillColor();
assertEquals(cExp.getRed(), cAct.getRed());
assertEquals(cExp.getGreen(), cAct.getGreen());
assertEquals(cExp.getBlue(), cAct.getBlue());
assertEquals(cExp.getAlpha(), cAct.getAlpha(), 1);
PaintStyle ps = fs.getFillStyle().getPaint();
assertTrue(ps instanceof SolidPaint);
ColorStyle cs = ((SolidPaint) ps).getSolidColor();
cAct = cs.getColor();
assertEquals(cExp.getRed(), cAct.getRed());
assertEquals(cExp.getGreen(), cAct.getGreen());
assertEquals(cExp.getBlue(), cAct.getBlue());
assertEquals(255, cAct.getAlpha());
assertEquals(cExp.getAlpha() * 100000. / 255., cs.getAlpha(), 1);
}
}
}
@Test
void bug59302() throws IOException {
//add extraction from PPT
Map<String, String> macros = getMacrosFromHSLF("59302.ppt");
assertNotNull(macros, "couldn't find macros");
assertNotNull(macros.get("Module2"), "couldn't find second module");
assertContains(macros.get("Module2"), "newMacro in Module2");
assertNotNull(macros.get("Module1"), "couldn't find first module");
assertContains(macros.get("Module1"), "Italicize");
macros = getMacrosFromHSLF("SimpleMacro.ppt");
assertNotNull(macros);
assertNotNull(macros.get("Module1"));
assertContains(macros.get("Module1"), "This is a macro slideshow");
}
//It isn't pretty, but it works...
private Map<String, String> getMacrosFromHSLF(String fileName) throws IOException {
try (InputStream is = new FileInputStream(POIDataSamples.getSlideShowInstance().getFile(fileName));
POIFSFileSystem poifs = new POIFSFileSystem(is);
HSLFSlideShow ppt = new HSLFSlideShow(poifs)) {
//TODO: should we run the VBAMacroReader on this poifs?
//TBD: We know that ppt typically don't store macros in the regular place,
//but _can_ they?
//get macro persist id
DocInfoListContainer list = (DocInfoListContainer)ppt.getDocumentRecord().findFirstOfType(RecordTypes.List.typeID);
VBAInfoContainer vbaInfo = (VBAInfoContainer)list.findFirstOfType(RecordTypes.VBAInfo.typeID);
VBAInfoAtom vbaAtom = (VBAInfoAtom)vbaInfo.findFirstOfType(RecordTypes.VBAInfoAtom.typeID);
long persistId = vbaAtom.getPersistIdRef();
for (HSLFObjectData objData : ppt.getEmbeddedObjects()) {
if (objData.getExOleObjStg().getPersistId() == persistId) {
try (VBAMacroReader mr = new VBAMacroReader(objData.getInputStream())) {
return mr.readMacros();
}
}
}
}
return null;
}
/**
* Bug 60294: Add "unknown" ShapeType for 4095
*/
@Test
void bug60294() throws IOException {
try (HSLFSlideShow ppt = open("60294.ppt")) {
List<HSLFShape> shList = ppt.getSlides().get(0).getShapes();
assertEquals(ShapeType.NOT_PRIMITIVE, shList.get(2).getShapeType());
}
}
@Test
void test501RC1Failure() throws Exception {
try (HSLFSlideShow ppt = open("23884_defense_FINAL_OOimport_edit.ppt")) {
List<HSLFShape> shList = ppt.getSlides().get(0).getShapes();
assertEquals(ShapeType.NOT_PRIMITIVE, shList.get(2).getShapeType());
}
}
@Test
void test69697() throws Exception {
try (HSLFSlideShow ppt = open("bug69697.ppt")) {
HSLFSlide slide = ppt.getSlides().get(0);
for (HSLFShape sh : slide.getShapes()) {
if (sh instanceof HSLFPictureShape) {
HSLFPictureShape pict = (HSLFPictureShape) sh;
HSLFPictureData pictData = pict.getPictureData();
assertNotNull(pictData, "PictureData should not be null for shape: " + pict.getShapeName());
byte[] data = pictData.getData();
assertNotNull(data, "Picture data should not be null for shape: " + pict.getShapeName());
PictureData.PictureType type = pictData.getType();
assertNotNull(type, "Picture type should not be null for shape: " + pict.getShapeName());
}
}
}
}
}
|
apache/stratos | 38,367 | components/org.apache.stratos.autoscaler/src/main/java/org/apache/stratos/autoscaler/monitor/component/ApplicationMonitor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.stratos.autoscaler.monitor.component;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.stratos.autoscaler.algorithms.NetworkPartitionAlgorithm;
import org.apache.stratos.autoscaler.algorithms.networkpartition.AllAtOnceAlgorithm;
import org.apache.stratos.autoscaler.algorithms.networkpartition.NetworkPartitionAlgorithmContext;
import org.apache.stratos.autoscaler.algorithms.networkpartition.OneAfterAnotherAlgorithm;
import org.apache.stratos.autoscaler.applications.ApplicationHolder;
import org.apache.stratos.autoscaler.applications.topic.ApplicationBuilder;
import org.apache.stratos.autoscaler.context.AutoscalerContext;
import org.apache.stratos.autoscaler.context.InstanceContext;
import org.apache.stratos.autoscaler.context.application.ParentInstanceContext;
import org.apache.stratos.autoscaler.context.partition.network.NetworkPartitionContext;
import org.apache.stratos.autoscaler.exception.application.DependencyBuilderException;
import org.apache.stratos.autoscaler.exception.application.MonitorNotFoundException;
import org.apache.stratos.autoscaler.exception.application.TopologyInConsistentException;
import org.apache.stratos.autoscaler.exception.policy.PolicyValidationException;
import org.apache.stratos.autoscaler.monitor.Monitor;
import org.apache.stratos.autoscaler.monitor.events.*;
import org.apache.stratos.autoscaler.monitor.events.builder.MonitorStatusEventBuilder;
import org.apache.stratos.autoscaler.pojo.policy.PolicyManager;
import org.apache.stratos.autoscaler.pojo.policy.deployment.ApplicationPolicy;
import org.apache.stratos.autoscaler.util.AutoscalerConstants;
import org.apache.stratos.autoscaler.util.ServiceReferenceHolder;
import org.apache.stratos.common.constants.StratosConstants;
import org.apache.stratos.common.threading.StratosThreadPool;
import org.apache.stratos.messaging.domain.application.Application;
import org.apache.stratos.messaging.domain.application.ApplicationStatus;
import org.apache.stratos.messaging.domain.application.GroupStatus;
import org.apache.stratos.messaging.domain.instance.ApplicationInstance;
import org.apache.stratos.messaging.domain.topology.ClusterStatus;
import org.apache.stratos.messaging.domain.topology.lifecycle.LifeCycleState;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
/**
* ApplicationMonitor is to control the child monitors
*/
public class ApplicationMonitor extends ParentComponentMonitor {
private static final Log log = LogFactory.getLog(ApplicationMonitor.class);
private final ExecutorService executorService;
//Flag to set whether application is terminating
private boolean isTerminating;
private boolean isRestarting;
// Flag to set if forceful un-deployment is invoked for the application.
private boolean force;
public ApplicationMonitor(Application application) throws DependencyBuilderException,
TopologyInConsistentException {
super(application);
int threadPoolSize = Integer.getInteger(AutoscalerConstants.MONITOR_THREAD_POOL_SIZE, 100);
this.executorService = StratosThreadPool.getExecutorService(
AutoscalerConstants.MONITOR_THREAD_POOL_ID, threadPoolSize);
//setting the appId for the application
this.appId = application.getUniqueIdentifier();
}
@Override
public MonitorType getMonitorType() {
return MonitorType.Application;
}
@Override
public void run() {
try {
monitor();
} catch (Exception e) {
log.error("Application monitor failed : " + this.toString(), e);
}
}
/**
* This thread will monitor the children across all the network partitions and take
* decision for scale-up or scale-down
*/
public synchronized void monitor() {
final Collection<NetworkPartitionContext> networkPartitionContexts =
this.getNetworkPartitionContextsMap().values();
Runnable monitoringRunnable = new Runnable() {
@Override
public void run() {
if (log.isDebugEnabled()) {
log.debug("Application monitor is running for [application] " + appId);
}
for (NetworkPartitionContext networkPartitionContext : networkPartitionContexts) {
for (InstanceContext instanceContext : networkPartitionContext.
getInstanceIdToInstanceContextMap().values()) {
ApplicationInstance instance = (ApplicationInstance) instanceIdToInstanceMap.
get(instanceContext.getId());
ParentInstanceContext parentInstanceContext = (ParentInstanceContext) instanceContext;
//stopping the monitoring when the group is inactive/Terminating/Terminated
if (instance.getStatus().getCode() <= ApplicationStatus.Active.getCode()) {
//Gives priority to scaling max out rather than dependency scaling
if (!parentInstanceContext.getIdToScalingOverMaxEvent().isEmpty()) {
//handling the scaling max out of the children
handleScalingMaxOut(parentInstanceContext, networkPartitionContext);
} else if (!parentInstanceContext.getIdToScalingEvent().isEmpty()) {
//handling the dependent scaling for application
handleDependentScaling(parentInstanceContext, networkPartitionContext);
} else if (!parentInstanceContext.getIdToScalingDownBeyondMinEvent().isEmpty()) {
//handling the scale down of the application
handleScalingDownBeyondMin(parentInstanceContext, networkPartitionContext);
}
}
//Resetting the events events
parentInstanceContext.setIdToScalingDownBeyondMinEvent(
new ConcurrentHashMap<String, ScalingDownBeyondMinEvent>());
parentInstanceContext.setIdToScalingEvent(
new ConcurrentHashMap<String, ScalingEvent>());
parentInstanceContext.setIdToScalingOverMaxEvent(
new ConcurrentHashMap<String, ScalingUpBeyondMaxEvent>());
}
}
//When the application is getting un-deployed, need to avoid
// checking the minimum count sanctification
if (!isTerminating()) {
Application application = ApplicationHolder.getApplications().getApplication(appId);
if (application != null) {
List<String> defaultNetworkPartitions = getDefaultNetworkPartitions(application);
//Checking for whether minimum application instances are there.
if (defaultNetworkPartitions != null) {
checkForMinimumApplicationInstances(application, defaultNetworkPartitions);
}
/*//Checking for whether any application instances need to be terminated.
checkForApplicationInstanceTermination(application, defaultNetworkPartitions);*/
}
}
}
};
executorService.execute(monitoringRunnable);
}
private void handleScalingMaxOut(ParentInstanceContext instanceContext,
NetworkPartitionContext networkPartitionContext) {
if (((NetworkPartitionContext) networkPartitionContext).getPendingInstancesCount() == 0) {
//handling the application bursting only when there are no pending instances found
try {
if (log.isInfoEnabled()) {
log.info("Handling application busting, " +
"since resources are exhausted in " +
"this application instance ");
}
handleApplicationBursting();
} catch (TopologyInConsistentException e) {
log.error("Error while bursting the application", e);
} catch (PolicyValidationException e) {
log.error("Error while bursting the application", e);
} catch (MonitorNotFoundException e) {
log.error("Error while bursting the application", e);
}
} else {
if (log.isDebugEnabled()) {
log.debug("Pending Application instance found. " +
"Hence waiting for it to become active");
}
}
}
/**
* Handling the scale-down decision making
*
* @param instanceContext instance-context which can be scaled-down
* @param nwPartitionContext the network-partition-context of the instance
*/
private void handleScalingDownBeyondMin(ParentInstanceContext instanceContext,
NetworkPartitionContext nwPartitionContext) {
//Traverse through all the children to see whether all have sent the scale down
boolean allChildrenScaleDown = false;
for (Monitor monitor : this.aliasToActiveChildMonitorsMap.values()) {
if (instanceContext.getScalingDownBeyondMinEvent(monitor.getId()) == null) {
allChildrenScaleDown = false;
break;
} else {
allChildrenScaleDown = true;
}
}
//all the children sent the scale down only, it will try to scale down
if (allChildrenScaleDown) {
//Need to get the network partition
NetworkPartitionAlgorithmContext algorithmContext = AutoscalerContext.getInstance().
getNetworkPartitionAlgorithmContext(appId);
if (algorithmContext == null) {
String msg = String.format("Network partition algorithm context not found " +
"in registry or in-memory [application-id] %s", appId);
log.error(msg);
throw new RuntimeException(msg);
}
ApplicationPolicy applicationPolicy = PolicyManager.getInstance().
getApplicationPolicy(algorithmContext.getApplicationPolicyId());
if (applicationPolicy == null) {
String msg = String.format("Application policy not found in registry or " +
"in-memory [application-id] %s", appId);
log.error(msg);
throw new RuntimeException(msg);
}
String networkPartitionAlgorithmName = applicationPolicy.getAlgorithm();
if (log.isDebugEnabled()) {
String msg = String.format("Network partition algorithm is %s [application-id] %s",
networkPartitionAlgorithmName, appId);
log.debug(msg);
}
NetworkPartitionAlgorithm algorithm = getNetworkPartitionAlgorithm(
networkPartitionAlgorithmName);
if (algorithm == null) {
String msg = String.format("Couldn't create network partition algorithm " +
"[application-id] %s", appId);
log.error(msg);
throw new RuntimeException(msg);
}
// Check whether the network-partition of the application
// instance belongs to default set of network-partitions.
// If it is default set, then application instance cannot be terminated.
List<String> defaultNetworkPartitions = algorithm.
getDefaultNetworkPartitions(algorithmContext);
if (!defaultNetworkPartitions.contains(nwPartitionContext.getId())) {
//Since it is not default network-partition, it can be terminated
// upon scale-down of the children as it has been created by bursting
ApplicationBuilder.handleApplicationInstanceTerminatingEvent(this.appId,
instanceContext.getId());
}
}
}
public List<String> getDefaultNetworkPartitions(Application application) {
//Minimum check, Need to get the network partition
NetworkPartitionAlgorithmContext algorithmContext = AutoscalerContext.getInstance().
getNetworkPartitionAlgorithmContext(appId);
ApplicationPolicy applicationPolicy = PolicyManager.getInstance().
getApplicationPolicy(application.getApplicationPolicyId());
List<String> defaultNetworkPartitions = null;
if (applicationPolicy != null) {
String networkPartitionAlgorithmName = applicationPolicy.getAlgorithm();
if (log.isDebugEnabled()) {
String msg = String.format("Network partition algorithm is %s [application-id] %s",
networkPartitionAlgorithmName, appId);
log.debug(msg);
}
NetworkPartitionAlgorithm algorithm = getNetworkPartitionAlgorithm(
networkPartitionAlgorithmName);
if (algorithm == null) {
String msg = String.format("Couldn't create network partition algorithm " +
"[application-id] %s", appId);
log.error(msg);
throw new RuntimeException(msg);
}
// Check whether the network-partition of the application
// instance belongs to default set of network-partitions.
// If it is default set, then application instance cannot be terminated.
defaultNetworkPartitions = algorithm.
getDefaultNetworkPartitions(algorithmContext);
}
return defaultNetworkPartitions;
}
private void checkForMinimumApplicationInstances(Application application,
List<String> defaultNetworkPartitions) {
List<String> instanceIds = new ArrayList<String>();
for (String networkPartitionId : defaultNetworkPartitions) {
if (!networkPartitionContextsMap.containsKey(networkPartitionId)) {
String instanceId;
log.info("Detected a newly updated [network-partition] " + networkPartitionId +
" for [application] " + appId + ". Hence new application instance " +
"creation is going to start now!");
NetworkPartitionContext context =
new NetworkPartitionContext(networkPartitionId);
//If application instances found in the ApplicationsTopology,
// then have to add them first before creating new one
ApplicationInstance appInstance = (ApplicationInstance) application.
getInstanceByNetworkPartitionId(context.getId());
if (appInstance != null) {
log.warn("The [application] " + appId + " already has the " +
"[application-instance] " + appInstance.getInstanceId() + " for the " +
"[network-partition] " + networkPartitionId);
return;
}
instanceId = handleApplicationInstanceCreation(application, context, null);
instanceIds.add(instanceId);
}
}
//Starting the dependencies
if (!instanceIds.isEmpty()) {
startDependency(application, instanceIds);
}
}
private void checkForApplicationInstanceTermination(Application application,
List<String> defaultNetworkPartitions) {
for (NetworkPartitionContext networkPartitionContext : networkPartitionContextsMap.values()) {
String nPartitionId = networkPartitionContext.getId();
if (!defaultNetworkPartitions.contains(nPartitionId)) {
log.info("The [application] " + appId + " runtime cannot be in [network-partition] "
+ nPartitionId + " as it is removed from the [application-policy]...!");
for (InstanceContext instanceContext : networkPartitionContext.
getInstanceIdToInstanceContextMap().values()) {
//Handling application instance termination
ApplicationBuilder.handleApplicationInstanceTerminatingEvent(this.appId,
instanceContext.getId());
}
}
}
}
/**
* Find the group monitor by traversing recursively in the hierarchical monitors.
*
* @param groupId the unique alias of the Group
* @return the found GroupMonitor
*/
public Monitor findGroupMonitorWithId(String groupId) {
//searching within active monitors
return findGroupMonitor(groupId, aliasToActiveChildMonitorsMap);
}
/**
* Utility method to find the group monitor recursively within app monitor
*
* @param id the unique alias of the Group
* @param monitors the group monitors found in the app monitor
* @return the found GroupMonitor
*/
private Monitor findGroupMonitor(String id, Map<String, Monitor> monitors) {
if (monitors.containsKey(id)) {
return monitors.get(id);
}
for (Monitor monitor : monitors.values()) {
if (monitor instanceof ParentComponentMonitor) {
Monitor groupMonitor = findGroupMonitor(id, ((ParentComponentMonitor) monitor).
getAliasToActiveChildMonitorsMap());
if (groupMonitor != null) {
return groupMonitor;
}
}
}
return null;
}
/**
* To set the status of the application monitor
*
* @param status the status
*/
public void setStatus(ApplicationStatus status, String instanceId) {
ApplicationInstance applicationInstance = (ApplicationInstance) this.instanceIdToInstanceMap.
get(instanceId);
if (applicationInstance == null) {
log.warn("The required application [instance] " + instanceId + " not found " +
"in the AppMonitor");
} else {
if (applicationInstance.getStatus() != status) {
applicationInstance.setStatus(status);
}
}
//notify the children about the state change
try {
MonitorStatusEventBuilder.notifyChildren(this, new ApplicationStatusEvent(status,
appId, instanceId));
} catch (MonitorNotFoundException e) {
log.error("Error while notifying the children from [application] " + appId, e);
//TODO revert siblings
}
}
@Override
public void onChildStatusEvent(final MonitorStatusEvent statusEvent) {
Runnable monitoringRunnable = new Runnable() {
@Override
public void run() {
String childId = statusEvent.getId();
String instanceId = statusEvent.getInstanceId();
LifeCycleState status1 = statusEvent.getStatus();
//Events coming from parent are In_Active(in faulty detection), Scaling events, termination
if (status1 == ClusterStatus.Active || status1 == GroupStatus.Active) {
onChildActivatedEvent(childId, instanceId);
} else if (status1 == ClusterStatus.Inactive || status1 == GroupStatus.Inactive) {
markInstanceAsInactive(childId, instanceId);
onChildInactiveEvent(childId, instanceId);
} else if (status1 == ClusterStatus.Terminating || status1 == GroupStatus.Terminating) {
//mark the child monitor as inActive in the map
markInstanceAsTerminating(childId, instanceId);
} else if (status1 == ClusterStatus.Terminated || status1 == GroupStatus.Terminated) {
//Check whether all dependent goes Terminated and then start them in parallel.
removeInstanceFromFromInactiveMap(childId, instanceId);
removeInstanceFromFromTerminatingMap(childId, instanceId);
//If application is forcefully un-deployed, no need to handle here.
if(!force) {
ApplicationInstance instance = (ApplicationInstance) instanceIdToInstanceMap.get(instanceId);
if (instance != null) {
if (isTerminating() || instance.getStatus() == ApplicationStatus.Terminating ||
instance.getStatus() == ApplicationStatus.Terminated) {
ServiceReferenceHolder.getInstance().getGroupStatusProcessorChain().process(id,
appId, instanceId);
} else {
Monitor monitor = getMonitor(childId);
boolean active = false;
if (monitor instanceof GroupMonitor) {
//Checking whether the Group is still active in case the faulty member
// identified after scaling up
active = verifyGroupStatus(childId, instanceId, GroupStatus.Active);
}
if (!active) {
onChildTerminatedEvent(childId, instanceId);
} else {
log.info("[Group Instance] " + instanceId + " is still active " +
"upon termination of the [child ] " + childId);
}
}
} else {
log.warn("The required instance cannot be found in the the [GroupMonitor] " +
id);
}
}
}
}
};
executorService.execute(monitoringRunnable);
}
@Override
public void onParentStatusEvent(MonitorStatusEvent statusEvent) {
// nothing to do
}
@Override
public void onParentScalingEvent(ScalingEvent scalingEvent) {
}
/**
* Utility to create application instance by parsing the deployment policy for a monitor
*
* @param application the application
* @return whether the instance created or not
* @throws TopologyInConsistentException
* @throws PolicyValidationException
*/
public boolean createInstanceAndStartDependency(Application application)
throws TopologyInConsistentException, PolicyValidationException {
boolean initialStartup = true;
try {
List<String> instanceIds = new ArrayList<String>();
String instanceId;
ApplicationPolicy applicationPolicy = PolicyManager.getInstance().
getApplicationPolicy(application.getApplicationPolicyId());
if (applicationPolicy == null) {
String msg = String.format("Application policy not found in registry or " +
"in-memory [application-id] %s", appId);
log.error(msg);
throw new RuntimeException(msg);
}
NetworkPartitionAlgorithmContext algorithmContext = AutoscalerContext.getInstance().
getNetworkPartitionAlgorithmContext(appId);
if (algorithmContext == null) {
String msg = String.format("Network partition algorithm context not found " +
"in registry or in-memory [application-id] %s", appId);
log.error(msg);
throw new RuntimeException(msg);
}
String networkPartitionAlgorithmName = applicationPolicy.getAlgorithm();
if (log.isDebugEnabled()) {
String msg = String.format("Network partition algorithm is %s [application-id] %s",
networkPartitionAlgorithmName, appId);
log.debug(msg);
}
NetworkPartitionAlgorithm algorithm = getNetworkPartitionAlgorithm(
networkPartitionAlgorithmName);
if (algorithm == null) {
String msg = String.format("Couldn't create network partition algorithm " +
"[application-id] %s", appId);
log.error(msg);
throw new RuntimeException(msg);
}
List<String> nextNetworkPartitions = algorithm.getNextNetworkPartitions(algorithmContext);
if (nextNetworkPartitions == null || nextNetworkPartitions.isEmpty()) {
String msg = String.format("No network partitions available for application bursting " +
"[application-id] %s", appId);
log.warn(msg);
return false;
}
for (String networkPartitionIds : nextNetworkPartitions) {
NetworkPartitionContext context =
new NetworkPartitionContext(networkPartitionIds);
//If application instances found in the ApplicationsTopology,
// then have to add them first before creating new one
ApplicationInstance appInstance = (ApplicationInstance) application.
getInstanceByNetworkPartitionId(context.getId());
if (appInstance != null) {
//use the existing instance in the Topology to create the data
if (!isRestarting) {
this.setRestarting(true);
}
instanceId = handleApplicationInstanceCreation(application, context, appInstance);
initialStartup = false;
} else {
//create new app instance as it doesn't exist in the Topology
instanceId = handleApplicationInstanceCreation(application, context, null);
}
instanceIds.add(instanceId);
log.info("Application instance has been added for the [network partition] " +
networkPartitionIds + " [appInstanceId] " + instanceId);
}
//Find whether any other instances exists in cluster
// which has not been added to in-memory model in the restart
Map<String, ApplicationInstance> instanceMap = application.getInstanceIdToInstanceContextMap();
for (ApplicationInstance instance : instanceMap.values()) {
if (!instanceIds.contains(instance.getInstanceId())) {
NetworkPartitionContext context =
new NetworkPartitionContext(instance.getNetworkPartitionId());
//If application instances found in the ApplicationsTopology,
// then have to add them first before creating new one
ApplicationInstance appInstance = (ApplicationInstance) application.
getInstanceByNetworkPartitionId(context.getId());
//use the existing instance in the Topology to create the data
handleApplicationInstanceCreation(application, context, appInstance);
instanceIds.add(instance.getInstanceId());
log.info("Burst Application instance has been added in the restart for " +
"the [network partition] " + instance.getNetworkPartitionId() +
" [appInstanceId] " + instance.getInstanceId());
}
}
if (!instanceIds.isEmpty()) {
startDependency(application, instanceIds);
}
} catch (Exception e) {
log.error(String.format("Application instance creation failed [applcaition-id] %s", appId), e);
}
return initialStartup;
}
/**
* Utility method to create application instance inside a network partition and
* add data structure to monitor
*
* @param application the application where the application instance needs to be created
* @param context networkPartition where instance needs to be created
* @param instanceExist whether application instance exists or not
* @return instance Id
*/
private String handleApplicationInstanceCreation(Application application,
NetworkPartitionContext context,
ApplicationInstance instanceExist) {
ApplicationInstance instance;
ParentInstanceContext instanceContext;
if (instanceExist != null) {
//using the existing instance
instance = instanceExist;
} else {
//creating a new applicationInstance
instance = createApplicationInstance(application, context.getId());
}
String instanceId = instance.getInstanceId();
//Creating appInstanceContext
instanceContext = new ParentInstanceContext(instanceId);
//adding the created App InstanceContext to ApplicationLevelNetworkPartitionContext
context.addInstanceContext(instanceContext);
context.addPendingInstance(instanceContext);
//adding to instance map
this.instanceIdToInstanceMap.put(instanceId, instance);
//adding ApplicationLevelNetworkPartitionContext to networkPartitionContexts map
this.getNetworkPartitionContextsMap().put(context.getId(), context);
return instanceId;
}
/**
 * Handles bursting the application into the next available network partition(s),
 * as selected by the network partition algorithm configured in the application policy.
 * For each selected partition that is not already monitored, an application instance
 * is created (or an existing one reused) and the dependency start-up is triggered.
 *
 * @throws TopologyInConsistentException if the application is missing from the topology
 * @throws PolicyValidationException     if the application policy is invalid
 * @throws MonitorNotFoundException      if a required monitor cannot be found
 */
public void handleApplicationBursting() throws TopologyInConsistentException,
        PolicyValidationException,
        MonitorNotFoundException {
    Application application = ApplicationHolder.getApplications().getApplication(appId);
    if (application == null) {
        String msg = "Application cannot be found in the Topology.";
        throw new TopologyInConsistentException(msg);
    }
    boolean burstNPFound = false;
    List<String> instanceIdList = new ArrayList<String>();
    // The application policy decides which algorithm picks the burst network partitions.
    ApplicationPolicy applicationPolicy = PolicyManager.getInstance().
            getApplicationPolicy(application.getApplicationPolicyId());
    if (applicationPolicy == null) {
        String msg = String.format("Application policy not found in registry or in-memory " +
                "[application-id] %s", appId);
        log.error(msg);
        throw new RuntimeException(msg);
    }
    NetworkPartitionAlgorithmContext algorithmContext = AutoscalerContext.getInstance().
            getNetworkPartitionAlgorithmContext(appId);
    if (algorithmContext == null) {
        String msg = String.format("Network partition algorithm context not found in" +
                " registry or in-memory [application-id] %s", appId);
        log.error(msg);
        throw new RuntimeException(msg);
    }
    String networkPartitionAlgorithmName = applicationPolicy.getAlgorithm();
    if (log.isDebugEnabled()) {
        log.debug(String.format("Network partition algorithm is %s [application-id] %s",
                networkPartitionAlgorithmName, appId));
    }
    NetworkPartitionAlgorithm algorithm = getNetworkPartitionAlgorithm(
            networkPartitionAlgorithmName);
    if (algorithm == null) {
        String msg = String.format("Couldn't create network partition algorithm " +
                "[application-id] %s", appId);
        log.error(msg);
        throw new RuntimeException(msg);
    }
    List<String> nextNetworkPartitions = algorithm.getNextNetworkPartitions(algorithmContext);
    if (nextNetworkPartitions == null || nextNetworkPartitions.isEmpty()) {
        log.warn(String.format("No network partitions available for application " +
                "bursting [application-id] %s", appId));
        return;
    }
    for (String networkPartitionId : nextNetworkPartitions) {
        // Only burst into network partitions this monitor is not already tracking.
        if (!this.getNetworkPartitionContextsMap().containsKey(networkPartitionId)) {
            String instanceId;
            NetworkPartitionContext context = new
                    NetworkPartitionContext(networkPartitionId);
            ApplicationInstance appInstance = (ApplicationInstance) application.
                    getInstanceByNetworkPartitionId(context.getId());
            if (appInstance == null) {
                instanceId = handleApplicationInstanceCreation(application, context, null);
            } else {
                // BUGFIX: added the missing space before "in the ApplicationsTopology"
                // (message previously rendered "...[ApplicationInstance] <id>in the...").
                log.warn("The Network partition is already associated with an " +
                        "[ApplicationInstance] " + appInstance.getInstanceId() +
                        " in the ApplicationsTopology. Hence not creating new AppInstance.");
                instanceId = handleApplicationInstanceCreation(application, context, appInstance);
            }
            if (instanceId != null) {
                instanceIdList.add(instanceId);
            }
            burstNPFound = true;
        }
    }
    if (!burstNPFound) {
        log.warn("[Application] " + appId + " cannot be burst as no available resources found");
    } else {
        startDependency(application, instanceIdList);
    }
}
/**
 * Creates a new application instance in the applications topology for the given
 * network partition and returns it.
 *
 * @param application        the application the new instance belongs to (kept for interface
 *                           compatibility; the builder is keyed by this monitor's appId)
 * @param networkPartitionId the network partition the instance is created in
 * @return the application instance created in the topology
 */
private ApplicationInstance createApplicationInstance(Application application,
                                                      String networkPartitionId) {
    return ApplicationBuilder.handleApplicationInstanceCreatedEvent(appId, networkPartitionId);
}
/**
 * Returns whether the application is currently in the terminating state.
 *
 * @return true if the application is terminating
 */
public boolean isTerminating() {
return isTerminating;
}
/**
 * Sets the terminating state of the application.
 *
 * @param isTerminating true when the application starts terminating
 */
public void setTerminating(boolean isTerminating) {
this.isTerminating = isTerminating;
}
// Tears down this monitor: stops the scheduler so no further monitoring runs fire.
@Override
public void destroy() {
stopScheduler();
}
/**
 * On-demand instance creation is not supported by this monitor; always returns false.
 *
 * @param instanceId id of the instance requested (ignored)
 * @return false always
 */
@Override
public boolean createInstanceOnDemand(String instanceId) {
return false;
}
/**
 * Resolves the network partition algorithm implementation for the given algorithm name.
 *
 * @param algorithmName algorithm id taken from the application policy
 * @return the matching algorithm instance, or null if the name is null, empty or unknown
 */
private NetworkPartitionAlgorithm getNetworkPartitionAlgorithm(String algorithmName) {
    if (algorithmName == null || algorithmName.isEmpty()) {
        return null;
    }
    if (algorithmName.equals(StratosConstants.NETWORK_PARTITION_ONE_AFTER_ANOTHER_ALGORITHM_ID)) {
        if (log.isDebugEnabled()) {
            log.debug(String.format("Network partition algorithm is set to %s in " +
                            "application policy",
                    StratosConstants.NETWORK_PARTITION_ONE_AFTER_ANOTHER_ALGORITHM_ID));
        }
        return new OneAfterAnotherAlgorithm();
    } else if (algorithmName.equals(StratosConstants.NETWORK_PARTITION_ALL_AT_ONCE_ALGORITHM_ID)) {
        if (log.isDebugEnabled()) {
            log.debug(String.format("Network partition algorithm is set to %s " +
                            "in application policy",
                    StratosConstants.NETWORK_PARTITION_ALL_AT_ONCE_ALGORITHM_ID));
        }
        return new AllAtOnceAlgorithm();
    }
    if (log.isDebugEnabled()) {
        // BUGFIX: previously this message logged NETWORK_PARTITION_ALL_AT_ONCE_ALGORITHM_ID
        // instead of the actual unrecognized algorithm name, hiding the bad value.
        log.debug(String.format("Invalid network partition algorithm %s found " +
                "in application policy", algorithmName));
    }
    return null;
}
/**
 * Returns whether the force flag is set for this monitor.
 *
 * @return true if the force flag is set
 */
public boolean isForce() {
return force;
}
/**
 * Sets the force flag for this monitor.
 *
 * @param force the force flag value to set
 */
public void setForce(boolean force) {
this.force = force;
}
/**
 * Instance creation on termination is not supported by this monitor; always returns false.
 *
 * @param instanceId id of the terminated instance (ignored)
 * @return false always
 */
@Override
public boolean createInstanceOnTermination(String instanceId) {
return false;
}
/**
 * Returns whether the application is currently restarting.
 *
 * @return true if the application is in the restarting state
 */
public boolean isRestarting() {
return isRestarting;
}
/**
 * Sets the restarting state of the application.
 *
 * @param isRestarting true when the application is restarting
 */
public void setRestarting(boolean isRestarting) {
this.isRestarting = isRestarting;
}
}
|
googleapis/google-cloud-java | 37,996 | java-dataflow/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/SdkBug.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/dataflow/v1beta3/jobs.proto
// Protobuf Java Version: 3.25.8
package com.google.dataflow.v1beta3;
/**
*
*
* <pre>
* A bug found in the Dataflow SDK.
* </pre>
*
* Protobuf type {@code google.dataflow.v1beta3.SdkBug}
*/
public final class SdkBug extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.dataflow.v1beta3.SdkBug)
SdkBugOrBuilder {
private static final long serialVersionUID = 0L;
// Use SdkBug.newBuilder() to construct.
private SdkBug(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SdkBug() {
type_ = 0;
severity_ = 0;
uri_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SdkBug();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.dataflow.v1beta3.JobsProto
.internal_static_google_dataflow_v1beta3_SdkBug_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.dataflow.v1beta3.JobsProto
.internal_static_google_dataflow_v1beta3_SdkBug_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.dataflow.v1beta3.SdkBug.class,
com.google.dataflow.v1beta3.SdkBug.Builder.class);
}
/**
*
*
* <pre>
* Nature of the issue, ordered from least severe to most. Other bug types may
* be added to this list in the future.
* </pre>
*
* Protobuf enum {@code google.dataflow.v1beta3.SdkBug.Type}
*/
public enum Type implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* Unknown issue with this SDK.
* </pre>
*
* <code>TYPE_UNSPECIFIED = 0;</code>
*/
TYPE_UNSPECIFIED(0),
/**
*
*
* <pre>
* Catch-all for SDK bugs that don't fit in the below categories.
* </pre>
*
* <code>GENERAL = 1;</code>
*/
GENERAL(1),
/**
*
*
* <pre>
* Using this version of the SDK may result in degraded performance.
* </pre>
*
* <code>PERFORMANCE = 2;</code>
*/
PERFORMANCE(2),
/**
*
*
* <pre>
* Using this version of the SDK may cause data loss.
* </pre>
*
* <code>DATALOSS = 3;</code>
*/
DATALOSS(3),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* Unknown issue with this SDK.
* </pre>
*
* <code>TYPE_UNSPECIFIED = 0;</code>
*/
public static final int TYPE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* Catch-all for SDK bugs that don't fit in the below categories.
* </pre>
*
* <code>GENERAL = 1;</code>
*/
public static final int GENERAL_VALUE = 1;
/**
*
*
* <pre>
* Using this version of the SDK may result in degraded performance.
* </pre>
*
* <code>PERFORMANCE = 2;</code>
*/
public static final int PERFORMANCE_VALUE = 2;
/**
*
*
* <pre>
* Using this version of the SDK may cause data loss.
* </pre>
*
* <code>DATALOSS = 3;</code>
*/
public static final int DATALOSS_VALUE = 3;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static Type valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static Type forNumber(int value) {
switch (value) {
case 0:
return TYPE_UNSPECIFIED;
case 1:
return GENERAL;
case 2:
return PERFORMANCE;
case 3:
return DATALOSS;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<Type> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<Type> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<Type>() {
public Type findValueByNumber(int number) {
return Type.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.dataflow.v1beta3.SdkBug.getDescriptor().getEnumTypes().get(0);
}
private static final Type[] VALUES = values();
public static Type valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private Type(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.dataflow.v1beta3.SdkBug.Type)
}
/**
*
*
* <pre>
* Indicates the severity of the bug. Other severities may be added to this
* list in the future.
* </pre>
*
* Protobuf enum {@code google.dataflow.v1beta3.SdkBug.Severity}
*/
public enum Severity implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* A bug of unknown severity.
* </pre>
*
* <code>SEVERITY_UNSPECIFIED = 0;</code>
*/
SEVERITY_UNSPECIFIED(0),
/**
*
*
* <pre>
* A minor bug that that may reduce reliability or performance for some
* jobs. Impact will be minimal or non-existent for most jobs.
* </pre>
*
* <code>NOTICE = 1;</code>
*/
NOTICE(1),
/**
*
*
* <pre>
* A bug that has some likelihood of causing performance degradation, data
* loss, or job failures.
* </pre>
*
* <code>WARNING = 2;</code>
*/
WARNING(2),
/**
*
*
* <pre>
* A bug with extremely significant impact. Jobs may fail erroneously,
* performance may be severely degraded, and data loss may be very likely.
* </pre>
*
* <code>SEVERE = 3;</code>
*/
SEVERE(3),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* A bug of unknown severity.
* </pre>
*
* <code>SEVERITY_UNSPECIFIED = 0;</code>
*/
public static final int SEVERITY_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* A minor bug that that may reduce reliability or performance for some
* jobs. Impact will be minimal or non-existent for most jobs.
* </pre>
*
* <code>NOTICE = 1;</code>
*/
public static final int NOTICE_VALUE = 1;
/**
*
*
* <pre>
* A bug that has some likelihood of causing performance degradation, data
* loss, or job failures.
* </pre>
*
* <code>WARNING = 2;</code>
*/
public static final int WARNING_VALUE = 2;
/**
*
*
* <pre>
* A bug with extremely significant impact. Jobs may fail erroneously,
* performance may be severely degraded, and data loss may be very likely.
* </pre>
*
* <code>SEVERE = 3;</code>
*/
public static final int SEVERE_VALUE = 3;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static Severity valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static Severity forNumber(int value) {
switch (value) {
case 0:
return SEVERITY_UNSPECIFIED;
case 1:
return NOTICE;
case 2:
return WARNING;
case 3:
return SEVERE;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<Severity> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<Severity> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<Severity>() {
public Severity findValueByNumber(int number) {
return Severity.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.dataflow.v1beta3.SdkBug.getDescriptor().getEnumTypes().get(1);
}
private static final Severity[] VALUES = values();
public static Severity valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private Severity(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.dataflow.v1beta3.SdkBug.Severity)
}
public static final int TYPE_FIELD_NUMBER = 1;
private int type_ = 0;
/**
*
*
* <pre>
* Output only. Describes the impact of this SDK bug.
* </pre>
*
* <code>
* .google.dataflow.v1beta3.SdkBug.Type type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for type.
*/
@java.lang.Override
public int getTypeValue() {
return type_;
}
/**
*
*
* <pre>
* Output only. Describes the impact of this SDK bug.
* </pre>
*
* <code>
* .google.dataflow.v1beta3.SdkBug.Type type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The type.
*/
@java.lang.Override
public com.google.dataflow.v1beta3.SdkBug.Type getType() {
com.google.dataflow.v1beta3.SdkBug.Type result =
com.google.dataflow.v1beta3.SdkBug.Type.forNumber(type_);
return result == null ? com.google.dataflow.v1beta3.SdkBug.Type.UNRECOGNIZED : result;
}
public static final int SEVERITY_FIELD_NUMBER = 2;
private int severity_ = 0;
/**
*
*
* <pre>
* Output only. How severe the SDK bug is.
* </pre>
*
* <code>
* .google.dataflow.v1beta3.SdkBug.Severity severity = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for severity.
*/
@java.lang.Override
public int getSeverityValue() {
return severity_;
}
/**
*
*
* <pre>
* Output only. How severe the SDK bug is.
* </pre>
*
* <code>
* .google.dataflow.v1beta3.SdkBug.Severity severity = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The severity.
*/
@java.lang.Override
public com.google.dataflow.v1beta3.SdkBug.Severity getSeverity() {
com.google.dataflow.v1beta3.SdkBug.Severity result =
com.google.dataflow.v1beta3.SdkBug.Severity.forNumber(severity_);
return result == null ? com.google.dataflow.v1beta3.SdkBug.Severity.UNRECOGNIZED : result;
}
public static final int URI_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object uri_ = "";
/**
*
*
* <pre>
* Output only. Link to more information on the bug.
* </pre>
*
* <code>string uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The uri.
*/
@java.lang.Override
public java.lang.String getUri() {
java.lang.Object ref = uri_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
uri_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. Link to more information on the bug.
* </pre>
*
* <code>string uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for uri.
*/
@java.lang.Override
public com.google.protobuf.ByteString getUriBytes() {
java.lang.Object ref = uri_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
uri_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (type_ != com.google.dataflow.v1beta3.SdkBug.Type.TYPE_UNSPECIFIED.getNumber()) {
output.writeEnum(1, type_);
}
if (severity_ != com.google.dataflow.v1beta3.SdkBug.Severity.SEVERITY_UNSPECIFIED.getNumber()) {
output.writeEnum(2, severity_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, uri_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (type_ != com.google.dataflow.v1beta3.SdkBug.Type.TYPE_UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, type_);
}
if (severity_ != com.google.dataflow.v1beta3.SdkBug.Severity.SEVERITY_UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, severity_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, uri_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.dataflow.v1beta3.SdkBug)) {
return super.equals(obj);
}
com.google.dataflow.v1beta3.SdkBug other = (com.google.dataflow.v1beta3.SdkBug) obj;
if (type_ != other.type_) return false;
if (severity_ != other.severity_) return false;
if (!getUri().equals(other.getUri())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + TYPE_FIELD_NUMBER;
hash = (53 * hash) + type_;
hash = (37 * hash) + SEVERITY_FIELD_NUMBER;
hash = (53 * hash) + severity_;
hash = (37 * hash) + URI_FIELD_NUMBER;
hash = (53 * hash) + getUri().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.dataflow.v1beta3.SdkBug parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.SdkBug parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.SdkBug parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.SdkBug parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.SdkBug parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.SdkBug parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.SdkBug parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.SdkBug parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.dataflow.v1beta3.SdkBug parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.SdkBug parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.dataflow.v1beta3.SdkBug parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.SdkBug parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.dataflow.v1beta3.SdkBug prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A bug found in the Dataflow SDK.
* </pre>
*
* Protobuf type {@code google.dataflow.v1beta3.SdkBug}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.dataflow.v1beta3.SdkBug)
com.google.dataflow.v1beta3.SdkBugOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.dataflow.v1beta3.JobsProto
.internal_static_google_dataflow_v1beta3_SdkBug_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.dataflow.v1beta3.JobsProto
.internal_static_google_dataflow_v1beta3_SdkBug_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.dataflow.v1beta3.SdkBug.class,
com.google.dataflow.v1beta3.SdkBug.Builder.class);
}
// Construct using com.google.dataflow.v1beta3.SdkBug.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
type_ = 0;
severity_ = 0;
uri_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.dataflow.v1beta3.JobsProto
.internal_static_google_dataflow_v1beta3_SdkBug_descriptor;
}
@java.lang.Override
public com.google.dataflow.v1beta3.SdkBug getDefaultInstanceForType() {
return com.google.dataflow.v1beta3.SdkBug.getDefaultInstance();
}
@java.lang.Override
public com.google.dataflow.v1beta3.SdkBug build() {
com.google.dataflow.v1beta3.SdkBug result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.dataflow.v1beta3.SdkBug buildPartial() {
com.google.dataflow.v1beta3.SdkBug result = new com.google.dataflow.v1beta3.SdkBug(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.dataflow.v1beta3.SdkBug result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.type_ = type_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.severity_ = severity_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.uri_ = uri_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.dataflow.v1beta3.SdkBug) {
return mergeFrom((com.google.dataflow.v1beta3.SdkBug) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.dataflow.v1beta3.SdkBug other) {
if (other == com.google.dataflow.v1beta3.SdkBug.getDefaultInstance()) return this;
if (other.type_ != 0) {
setTypeValue(other.getTypeValue());
}
if (other.severity_ != 0) {
setSeverityValue(other.getSeverityValue());
}
if (!other.getUri().isEmpty()) {
uri_ = other.uri_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
type_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 16:
{
severity_ = input.readEnum();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
uri_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int type_ = 0;
/**
*
*
* <pre>
* Output only. Describes the impact of this SDK bug.
* </pre>
*
* <code>
* .google.dataflow.v1beta3.SdkBug.Type type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for type.
*/
@java.lang.Override
public int getTypeValue() {
return type_;
}
/**
*
*
* <pre>
* Output only. Describes the impact of this SDK bug.
* </pre>
*
* <code>
* .google.dataflow.v1beta3.SdkBug.Type type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @param value The enum numeric value on the wire for type to set.
* @return This builder for chaining.
*/
public Builder setTypeValue(int value) {
type_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Describes the impact of this SDK bug.
* </pre>
*
* <code>
* .google.dataflow.v1beta3.SdkBug.Type type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The type.
*/
@java.lang.Override
public com.google.dataflow.v1beta3.SdkBug.Type getType() {
com.google.dataflow.v1beta3.SdkBug.Type result =
com.google.dataflow.v1beta3.SdkBug.Type.forNumber(type_);
return result == null ? com.google.dataflow.v1beta3.SdkBug.Type.UNRECOGNIZED : result;
}
/**
*
*
* <pre>
* Output only. Describes the impact of this SDK bug.
* </pre>
*
* <code>
* .google.dataflow.v1beta3.SdkBug.Type type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @param value The type to set.
* @return This builder for chaining.
*/
public Builder setType(com.google.dataflow.v1beta3.SdkBug.Type value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
type_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Describes the impact of this SDK bug.
* </pre>
*
* <code>
* .google.dataflow.v1beta3.SdkBug.Type type = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearType() {
bitField0_ = (bitField0_ & ~0x00000001);
type_ = 0;
onChanged();
return this;
}
    // Field 2 (severity), stored as its wire number; presence tracked in bit
    // 0x2 of bitField0_.
    private int severity_ = 0;

    /**
     *
     *
     * <pre>
     * Output only. How severe the SDK bug is.
     * </pre>
     *
     * <code>
     * .google.dataflow.v1beta3.SdkBug.Severity severity = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The enum numeric value on the wire for severity.
     */
    @java.lang.Override
    public int getSeverityValue() {
      return severity_;
    }

    /**
     *
     *
     * <pre>
     * Output only. How severe the SDK bug is.
     * </pre>
     *
     * <code>
     * .google.dataflow.v1beta3.SdkBug.Severity severity = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The enum numeric value on the wire for severity to set.
     * @return This builder for chaining.
     */
    public Builder setSeverityValue(int value) {
      // Raw wire-number setter: accepts values with no matching enum constant.
      severity_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. How severe the SDK bug is.
     * </pre>
     *
     * <code>
     * .google.dataflow.v1beta3.SdkBug.Severity severity = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The severity.
     */
    @java.lang.Override
    public com.google.dataflow.v1beta3.SdkBug.Severity getSeverity() {
      // forNumber returns null for wire numbers outside the known range;
      // surface those as UNRECOGNIZED rather than null.
      com.google.dataflow.v1beta3.SdkBug.Severity result =
          com.google.dataflow.v1beta3.SdkBug.Severity.forNumber(severity_);
      return result == null ? com.google.dataflow.v1beta3.SdkBug.Severity.UNRECOGNIZED : result;
    }

    /**
     *
     *
     * <pre>
     * Output only. How severe the SDK bug is.
     * </pre>
     *
     * <code>
     * .google.dataflow.v1beta3.SdkBug.Severity severity = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The severity to set.
     * @return This builder for chaining.
     */
    public Builder setSeverity(com.google.dataflow.v1beta3.SdkBug.Severity value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000002;
      severity_ = value.getNumber();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. How severe the SDK bug is.
     * </pre>
     *
     * <code>
     * .google.dataflow.v1beta3.SdkBug.Severity severity = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSeverity() {
      // Drop the presence bit and restore the proto3 default (0).
      bitField0_ = (bitField0_ & ~0x00000002);
      severity_ = 0;
      onChanged();
      return this;
    }
    // Field 3 (uri). Holds either a String or a ByteString: parsed input
    // arrives as a ByteString and is lazily decoded/cached as a String on
    // first read. Presence tracked in bit 0x4 of bitField0_.
    private java.lang.Object uri_ = "";

    /**
     *
     *
     * <pre>
     * Output only. Link to more information on the bug.
     * </pre>
     *
     * <code>string uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The uri.
     */
    public java.lang.String getUri() {
      java.lang.Object ref = uri_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent reads skip UTF-8 decoding.
        uri_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. Link to more information on the bug.
     * </pre>
     *
     * <code>string uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bytes for uri.
     */
    public com.google.protobuf.ByteString getUriBytes() {
      java.lang.Object ref = uri_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString (mirror of getUri's caching).
        uri_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. Link to more information on the bug.
     * </pre>
     *
     * <code>string uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The uri to set.
     * @return This builder for chaining.
     */
    public Builder setUri(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      uri_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Link to more information on the bug.
     * </pre>
     *
     * <code>string uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearUri() {
      uri_ = getDefaultInstance().getUri();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Link to more information on the bug.
     * </pre>
     *
     * <code>string uri = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The bytes for uri to set.
     * @return This builder for chaining.
     */
    public Builder setUriBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects bytes that are not valid UTF-8 (string fields must be UTF-8).
      checkByteStringIsUtf8(value);
      uri_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Delegates to the generated-message base class unchanged.
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Delegates to the generated-message base class unchanged.
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.dataflow.v1beta3.SdkBug)
  }

  // @@protoc_insertion_point(class_scope:google.dataflow.v1beta3.SdkBug)
  // Shared immutable default instance; also used as the no-argument
  // getDefaultInstance() singleton.
  private static final com.google.dataflow.v1beta3.SdkBug DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.dataflow.v1beta3.SdkBug();
  }

  public static com.google.dataflow.v1beta3.SdkBug getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser shared by all instances. On failure the partially built
  // message is attached to the thrown exception via setUnfinishedMessage so
  // callers can inspect what was parsed.
  private static final com.google.protobuf.Parser<SdkBug> PARSER =
      new com.google.protobuf.AbstractParser<SdkBug>() {
        @java.lang.Override
        public SdkBug parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors so the declared exception type is uniform.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<SdkBug> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SdkBug> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.dataflow.v1beta3.SdkBug getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 38,158 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/TestIamPermissionsSnapshotRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for Snapshots.TestIamPermissions. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.TestIamPermissionsSnapshotRequest}
*/
public final class TestIamPermissionsSnapshotRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.TestIamPermissionsSnapshotRequest)
TestIamPermissionsSnapshotRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use TestIamPermissionsSnapshotRequest.newBuilder() to construct.
  private TestIamPermissionsSnapshotRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Bare constructor used for the default instance; string fields start at
  // their proto3 default (empty string).
  private TestIamPermissionsSnapshotRequest() {
    project_ = "";
    resource_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Runtime hook for creating fresh instances; the parameter is unused by design.
    return new TestIamPermissionsSnapshotRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_TestIamPermissionsSnapshotRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds the descriptor's fields to this class and its Builder for
    // reflective access by the protobuf runtime.
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_TestIamPermissionsSnapshotRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest.class,
            com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest.Builder.class);
  }
  // Presence bits for optional fields; bit 0x1 = testPermissionsRequestResource.
  private int bitField0_;

  public static final int PROJECT_FIELD_NUMBER = 227560217;

  // Holds either a String or a ByteString; parsed input arrives as a
  // ByteString and is lazily decoded/cached as a String on first read.
  @SuppressWarnings("serial")
  private volatile java.lang.Object project_ = "";

  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The project.
   */
  @java.lang.Override
  public java.lang.String getProject() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent reads skip UTF-8 decoding.
      project_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for project.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getProjectBytes() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString (mirror of getProject's caching).
      project_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int RESOURCE_FIELD_NUMBER = 195806222;

  // Same String/ByteString lazy-caching scheme as project_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object resource_ = "";

  /**
   *
   *
   * <pre>
   * Name or id of the resource for this request.
   * </pre>
   *
   * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The resource.
   */
  @java.lang.Override
  public java.lang.String getResource() {
    java.lang.Object ref = resource_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      resource_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Name or id of the resource for this request.
   * </pre>
   *
   * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for resource.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getResourceBytes() {
    java.lang.Object ref = resource_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      resource_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int TEST_PERMISSIONS_REQUEST_RESOURCE_FIELD_NUMBER = 439214758;

  // Message-typed field; null when unset (presence tracked by bit 0x1).
  private com.google.cloud.compute.v1.TestPermissionsRequest testPermissionsRequestResource_;

  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the testPermissionsRequestResource field is set.
   */
  @java.lang.Override
  public boolean hasTestPermissionsRequestResource() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The testPermissionsRequestResource.
   */
  @java.lang.Override
  public com.google.cloud.compute.v1.TestPermissionsRequest getTestPermissionsRequestResource() {
    // Never returns null: substitutes the default instance when unset.
    return testPermissionsRequestResource_ == null
        ? com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance()
        : testPermissionsRequestResource_;
  }

  /**
   *
   *
   * <pre>
   * The body resource for this request
   * </pre>
   *
   * <code>
   * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder
      getTestPermissionsRequestResourceOrBuilder() {
    return testPermissionsRequestResource_ == null
        ? com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance()
        : testPermissionsRequestResource_;
  }
  // -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required proto2 fields here, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are emitted in ascending field-number order
    // (195806222, 227560217, 439214758); empty strings and unset messages
    // are omitted per proto3 semantics.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(439214758, getTestPermissionsRequestResource());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Size computation mirrors writeTo field-for-field and is memoized in
    // memoizedSize (-1 means not yet computed).
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              439214758, getTestPermissionsRequestResource());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest other =
        (com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest) obj;
    // Field-by-field comparison; the message field compares presence first,
    // then value, and unknown fields also participate.
    if (!getProject().equals(other.getProject())) return false;
    if (!getResource().equals(other.getResource())) return false;
    if (hasTestPermissionsRequestResource() != other.hasTestPermissionsRequestResource())
      return false;
    if (hasTestPermissionsRequestResource()) {
      if (!getTestPermissionsRequestResource().equals(other.getTestPermissionsRequestResource()))
        return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Hash mixes field numbers with field values, consistent with equals;
    // the optional message field contributes only when present.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PROJECT_FIELD_NUMBER;
    hash = (53 * hash) + getProject().hashCode();
    hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
    hash = (53 * hash) + getResource().hashCode();
    if (hasTestPermissionsRequestResource()) {
      hash = (37 * hash) + TEST_PERMISSIONS_REQUEST_RESOURCE_FIELD_NUMBER;
      hash = (53 * hash) + getTestPermissionsRequestResource().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Convenience parse entry points: buffer/byte variants delegate to PARSER,
  // stream variants go through the GeneratedMessageV3 I/O helpers.
  public static com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Returns a builder pre-populated with the prototype's field values.
  public static Builder newBuilder(
      com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; otherwise copy this
    // message's fields into the new builder.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* A request message for Snapshots.TestIamPermissions. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.TestIamPermissionsSnapshotRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.TestIamPermissionsSnapshotRequest)
com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_TestIamPermissionsSnapshotRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_TestIamPermissionsSnapshotRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest.class,
              com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest.Builder.class);
    }

    // Construct using com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly creates nested field builders when the runtime requests it
      // (alwaysUseFieldBuilders), e.g. for change-notification support.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getTestPermissionsRequestResourceFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      // Resets every field to its default and releases the nested builder.
      super.clear();
      bitField0_ = 0;
      project_ = "";
      resource_ = "";
      testPermissionsRequestResource_ = null;
      if (testPermissionsRequestResourceBuilder_ != null) {
        testPermissionsRequestResourceBuilder_.dispose();
        testPermissionsRequestResourceBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_TestIamPermissionsSnapshotRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest
        getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest build() {
      com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest buildPartial() {
      com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest result =
          new com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields from the builder into the result, remapping builder
    // presence bits (0x1 project, 0x2 resource, 0x4 message) onto the
    // message's own bitField0_ (0x1 = message field).
    private void buildPartial0(
        com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.project_ = project_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.resource_ = resource_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.testPermissionsRequestResource_ =
            testPermissionsRequestResourceBuilder_ == null
                ? testPermissionsRequestResource_
                : testPermissionsRequestResourceBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // The following overrides delegate to GeneratedMessageV3.Builder unchanged;
    // they exist so the concrete Builder return type is preserved.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for same-type merges; otherwise fall back to the generic
      // descriptor-driven merge.
      if (other instanceof com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest) {
        return mergeFrom((com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest other) {
      if (other
          == com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest.getDefaultInstance())
        return this;
      // Proto3 merge semantics: non-empty strings overwrite, set message
      // fields are merged recursively, empty/unset fields are ignored.
      if (!other.getProject().isEmpty()) {
        project_ = other.project_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getResource().isEmpty()) {
        resource_ = other.resource_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasTestPermissionsRequestResource()) {
        mergeTestPermissionsRequestResource(other.getTestPermissionsRequestResource());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // No required proto2 fields, so any state is considered initialized.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Tags below are (field_number << 3 | wire_type); wire type 2 =
          // length-delimited for both strings and the embedded message.
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 1566449778:
              {
                // 195806222 << 3 | 2 — the resource string field.
                resource_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 1566449778
            case 1820481738:
              {
                // 227560217 << 3 | 2 — the project string field.
                project_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 1820481738
            case -781249230:
              {
                // 439214758 << 3 | 2, wrapped negative in a signed 32-bit int —
                // the test_permissions_request_resource message field.
                input.readMessage(
                    getTestPermissionsRequestResourceFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case -781249230
            default:
              {
                // Unknown fields are preserved; a false return means an
                // end-group tag, which also terminates parsing.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder presence bits: 0x1 project, 0x2 resource,
    // 0x4 testPermissionsRequestResource.
    private int bitField0_;

    // Holds either a String or a ByteString; lazily decoded/cached on read,
    // same scheme as the message class.
    private java.lang.Object project_ = "";

    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The project.
     */
    public java.lang.String getProject() {
      java.lang.Object ref = project_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        project_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for project.
     */
    public com.google.protobuf.ByteString getProjectBytes() {
      java.lang.Object ref = project_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        project_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The project to set.
     * @return This builder for chaining.
     */
    public Builder setProject(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      project_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearProject() {
      project_ = getDefaultInstance().getProject();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for project to set.
     * @return This builder for chaining.
     */
    public Builder setProjectBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects bytes that are not valid UTF-8 (string fields must be UTF-8).
      checkByteStringIsUtf8(value);
      project_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // Same String/ByteString lazy-caching scheme as project_; presence bit 0x2.
    private java.lang.Object resource_ = "";

    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The resource.
     */
    public java.lang.String getResource() {
      java.lang.Object ref = resource_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        resource_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for resource.
     */
    public com.google.protobuf.ByteString getResourceBytes() {
      java.lang.Object ref = resource_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        resource_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The resource to set.
     * @return This builder for chaining.
     */
    public Builder setResource(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      resource_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearResource() {
      resource_ = getDefaultInstance().getResource();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Name or id of the resource for this request.
     * </pre>
     *
     * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for resource to set.
     * @return This builder for chaining.
     */
    public Builder setResourceBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects bytes that are not valid UTF-8 (string fields must be UTF-8).
      checkByteStringIsUtf8(value);
      resource_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
private com.google.cloud.compute.v1.TestPermissionsRequest testPermissionsRequestResource_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.TestPermissionsRequest,
com.google.cloud.compute.v1.TestPermissionsRequest.Builder,
com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder>
testPermissionsRequestResourceBuilder_;
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the testPermissionsRequestResource field is set.
*/
public boolean hasTestPermissionsRequestResource() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The testPermissionsRequestResource.
*/
public com.google.cloud.compute.v1.TestPermissionsRequest getTestPermissionsRequestResource() {
if (testPermissionsRequestResourceBuilder_ == null) {
return testPermissionsRequestResource_ == null
? com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance()
: testPermissionsRequestResource_;
} else {
return testPermissionsRequestResourceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTestPermissionsRequestResource(
com.google.cloud.compute.v1.TestPermissionsRequest value) {
if (testPermissionsRequestResourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
testPermissionsRequestResource_ = value;
} else {
testPermissionsRequestResourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTestPermissionsRequestResource(
com.google.cloud.compute.v1.TestPermissionsRequest.Builder builderForValue) {
if (testPermissionsRequestResourceBuilder_ == null) {
testPermissionsRequestResource_ = builderForValue.build();
} else {
testPermissionsRequestResourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
    /**
     * Merges the given message into the current value of the
     * {@code test_permissions_request_resource} field. If the field already
     * holds a non-default value, the two messages are merged field-by-field;
     * otherwise the given value simply replaces the current one.
     *
     * <code>
     * .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeTestPermissionsRequestResource(
        com.google.cloud.compute.v1.TestPermissionsRequest value) {
      if (testPermissionsRequestResourceBuilder_ == null) {
        // NOTE: '!=' against getDefaultInstance() is an intentional reference
        // comparison — the default instance is a shared singleton, so identity
        // is enough to detect "nothing meaningful set yet".
        if (((bitField0_ & 0x00000004) != 0)
            && testPermissionsRequestResource_ != null
            && testPermissionsRequestResource_
                != com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance()) {
          // Existing non-default value: merge into a nested builder.
          // (getTestPermissionsRequestResourceBuilder() also sets the has-bit
          // and nulls out testPermissionsRequestResource_.)
          getTestPermissionsRequestResourceBuilder().mergeFrom(value);
        } else {
          testPermissionsRequestResource_ = value;
        }
      } else {
        testPermissionsRequestResourceBuilder_.mergeFrom(value);
      }
      // Only mark the field present / notify listeners when the plain-field
      // path left a message behind; the builder paths handle this themselves.
      if (testPermissionsRequestResource_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearTestPermissionsRequestResource() {
bitField0_ = (bitField0_ & ~0x00000004);
testPermissionsRequestResource_ = null;
if (testPermissionsRequestResourceBuilder_ != null) {
testPermissionsRequestResourceBuilder_.dispose();
testPermissionsRequestResourceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.compute.v1.TestPermissionsRequest.Builder
getTestPermissionsRequestResourceBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getTestPermissionsRequestResourceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder
getTestPermissionsRequestResourceOrBuilder() {
if (testPermissionsRequestResourceBuilder_ != null) {
return testPermissionsRequestResourceBuilder_.getMessageOrBuilder();
} else {
return testPermissionsRequestResource_ == null
? com.google.cloud.compute.v1.TestPermissionsRequest.getDefaultInstance()
: testPermissionsRequestResource_;
}
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.TestPermissionsRequest test_permissions_request_resource = 439214758 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.TestPermissionsRequest,
com.google.cloud.compute.v1.TestPermissionsRequest.Builder,
com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder>
getTestPermissionsRequestResourceFieldBuilder() {
if (testPermissionsRequestResourceBuilder_ == null) {
testPermissionsRequestResourceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.TestPermissionsRequest,
com.google.cloud.compute.v1.TestPermissionsRequest.Builder,
com.google.cloud.compute.v1.TestPermissionsRequestOrBuilder>(
getTestPermissionsRequestResource(), getParentForChildren(), isClean());
testPermissionsRequestResource_ = null;
}
return testPermissionsRequestResourceBuilder_;
}
    /** Replaces this builder's unknown-field set; delegates to the generated superclass. */
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    /** Merges the given unknown-field set into this builder's; delegates to the superclass. */
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.TestIamPermissionsSnapshotRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.TestIamPermissionsSnapshotRequest)
private static final com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest();
}
public static com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  /**
   * Wire-format parser for this message. On any parse failure it attaches the
   * partially-built message to the thrown exception so callers can inspect
   * whatever was successfully read before the error.
   */
  private static final com.google.protobuf.Parser<TestIamPermissionsSnapshotRequest> PARSER =
      new com.google.protobuf.AbstractParser<TestIamPermissionsSnapshotRequest>() {
        @java.lang.Override
        public TestIamPermissionsSnapshotRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Re-throw the same exception, enriched with the partial message.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Missing required fields: convert to a protocol-buffer exception.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Underlying stream failure: wrap so callers see one exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the static wire-format parser for this message type. */
  public static com.google.protobuf.Parser<TestIamPermissionsSnapshotRequest> parser() {
    return PARSER;
  }
  /** Instance accessor for the shared static parser (protobuf Message contract). */
  @java.lang.Override
  public com.google.protobuf.Parser<TestIamPermissionsSnapshotRequest> getParserForType() {
    return PARSER;
  }
  /** Instance accessor for the shared default instance (protobuf Message contract). */
  @java.lang.Override
  public com.google.cloud.compute.v1.TestIamPermissionsSnapshotRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 38,065 | java-saasservicemgmt/proto-google-cloud-saasservicemgmt-v1beta1/src/main/java/com/google/cloud/saasplatform/saasservicemgmt/v1beta1/UnitOrBuilder.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/saasplatform/saasservicemgmt/v1beta1/deployments_resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.saasplatform.saasservicemgmt.v1beta1;
public interface UnitOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* Identifier. The resource name (full URI of the resource) following the
* standard naming scheme:
*
* "projects/{project}/locations/{location}/units/{unit}"
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code>
*
* @return The name.
*/
java.lang.String getName();
/**
*
*
* <pre>
* Identifier. The resource name (full URI of the resource) following the
* standard naming scheme:
*
* "projects/{project}/locations/{location}/units/{unit}"
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code>
*
* @return The bytes for name.
*/
com.google.protobuf.ByteString getNameBytes();
/**
*
*
* <pre>
* Optional. Reference to the UnitKind this Unit belongs to. Immutable once
* set.
* </pre>
*
* <code>
* string unit_kind = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The unitKind.
*/
java.lang.String getUnitKind();
/**
*
*
* <pre>
* Optional. Reference to the UnitKind this Unit belongs to. Immutable once
* set.
* </pre>
*
* <code>
* string unit_kind = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for unitKind.
*/
com.google.protobuf.ByteString getUnitKindBytes();
/**
*
*
* <pre>
* Optional. Output only. The current Release object for this Unit.
* </pre>
*
* <code>
* string release = 13 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The release.
*/
java.lang.String getRelease();
/**
*
*
* <pre>
* Optional. Output only. The current Release object for this Unit.
* </pre>
*
* <code>
* string release = 13 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for release.
*/
com.google.protobuf.ByteString getReleaseBytes();
/**
*
*
* <pre>
* Optional. Reference to the Saas Tenant resource this unit belongs to. This
* for example informs the maintenance policies to use for scheduling future
* updates on a unit. (optional and immutable once created)
* </pre>
*
* <code>
* string tenant = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The tenant.
*/
java.lang.String getTenant();
/**
*
*
* <pre>
* Optional. Reference to the Saas Tenant resource this unit belongs to. This
* for example informs the maintenance policies to use for scheduling future
* updates on a unit. (optional and immutable once created)
* </pre>
*
* <code>
* string tenant = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for tenant.
*/
com.google.protobuf.ByteString getTenantBytes();
/**
*
*
* <pre>
* Optional. Output only. List of concurrent UnitOperations that are operating
* on this Unit.
* </pre>
*
* <code>
* repeated string ongoing_operations = 5 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return A list containing the ongoingOperations.
*/
java.util.List<java.lang.String> getOngoingOperationsList();
/**
*
*
* <pre>
* Optional. Output only. List of concurrent UnitOperations that are operating
* on this Unit.
* </pre>
*
* <code>
* repeated string ongoing_operations = 5 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The count of ongoingOperations.
*/
int getOngoingOperationsCount();
/**
*
*
* <pre>
* Optional. Output only. List of concurrent UnitOperations that are operating
* on this Unit.
* </pre>
*
* <code>
* repeated string ongoing_operations = 5 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the element to return.
* @return The ongoingOperations at the given index.
*/
java.lang.String getOngoingOperations(int index);
/**
*
*
* <pre>
* Optional. Output only. List of concurrent UnitOperations that are operating
* on this Unit.
* </pre>
*
* <code>
* repeated string ongoing_operations = 5 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the value to return.
* @return The bytes of the ongoingOperations at the given index.
*/
com.google.protobuf.ByteString getOngoingOperationsBytes(int index);
/**
*
*
* <pre>
* Optional. Output only. List of pending (wait to be executed) UnitOperations
* for this unit.
* </pre>
*
* <code>
* repeated string pending_operations = 6 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return A list containing the pendingOperations.
*/
java.util.List<java.lang.String> getPendingOperationsList();
/**
*
*
* <pre>
* Optional. Output only. List of pending (wait to be executed) UnitOperations
* for this unit.
* </pre>
*
* <code>
* repeated string pending_operations = 6 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The count of pendingOperations.
*/
int getPendingOperationsCount();
/**
*
*
* <pre>
* Optional. Output only. List of pending (wait to be executed) UnitOperations
* for this unit.
* </pre>
*
* <code>
* repeated string pending_operations = 6 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the element to return.
* @return The pendingOperations at the given index.
*/
java.lang.String getPendingOperations(int index);
/**
*
*
* <pre>
* Optional. Output only. List of pending (wait to be executed) UnitOperations
* for this unit.
* </pre>
*
* <code>
* repeated string pending_operations = 6 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the value to return.
* @return The bytes of the pendingOperations at the given index.
*/
com.google.protobuf.ByteString getPendingOperationsBytes(int index);
/**
*
*
* <pre>
* Optional. Output only. List of scheduled UnitOperations for this unit.
* </pre>
*
* <code>
* repeated string scheduled_operations = 24 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return A list containing the scheduledOperations.
*/
java.util.List<java.lang.String> getScheduledOperationsList();
/**
*
*
* <pre>
* Optional. Output only. List of scheduled UnitOperations for this unit.
* </pre>
*
* <code>
* repeated string scheduled_operations = 24 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The count of scheduledOperations.
*/
int getScheduledOperationsCount();
/**
*
*
* <pre>
* Optional. Output only. List of scheduled UnitOperations for this unit.
* </pre>
*
* <code>
* repeated string scheduled_operations = 24 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the element to return.
* @return The scheduledOperations at the given index.
*/
java.lang.String getScheduledOperations(int index);
/**
*
*
* <pre>
* Optional. Output only. List of scheduled UnitOperations for this unit.
* </pre>
*
* <code>
* repeated string scheduled_operations = 24 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the value to return.
* @return The bytes of the scheduledOperations at the given index.
*/
com.google.protobuf.ByteString getScheduledOperationsBytes(int index);
/**
*
*
* <pre>
* Optional. Output only. List of Units that depend on this unit. Unit can
* only be deprovisioned if this list is empty. Maximum 1000.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency dependents = 7 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency>
getDependentsList();
/**
*
*
* <pre>
* Optional. Output only. List of Units that depend on this unit. Unit can
* only be deprovisioned if this list is empty. Maximum 1000.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency dependents = 7 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency getDependents(int index);
/**
*
*
* <pre>
* Optional. Output only. List of Units that depend on this unit. Unit can
* only be deprovisioned if this list is empty. Maximum 1000.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency dependents = 7 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
int getDependentsCount();
/**
*
*
* <pre>
* Optional. Output only. List of Units that depend on this unit. Unit can
* only be deprovisioned if this list is empty. Maximum 1000.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency dependents = 7 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<
? extends com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependencyOrBuilder>
getDependentsOrBuilderList();
/**
*
*
* <pre>
* Optional. Output only. List of Units that depend on this unit. Unit can
* only be deprovisioned if this list is empty. Maximum 1000.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency dependents = 7 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependencyOrBuilder
getDependentsOrBuilder(int index);
/**
*
*
* <pre>
* Optional. Output only. Set of dependencies for this unit. Maximum 10.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency dependencies = 8 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency>
getDependenciesList();
/**
*
*
* <pre>
* Optional. Output only. Set of dependencies for this unit. Maximum 10.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency dependencies = 8 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency getDependencies(int index);
/**
*
*
* <pre>
* Optional. Output only. Set of dependencies for this unit. Maximum 10.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency dependencies = 8 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
int getDependenciesCount();
/**
*
*
* <pre>
* Optional. Output only. Set of dependencies for this unit. Maximum 10.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency dependencies = 8 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<
? extends com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependencyOrBuilder>
getDependenciesOrBuilderList();
/**
*
*
* <pre>
* Optional. Output only. Set of dependencies for this unit. Maximum 10.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependency dependencies = 8 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitDependencyOrBuilder
getDependenciesOrBuilder(int index);
/**
*
*
* <pre>
* Optional. Output only. Indicates the current input variables deployed by
* the unit
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable input_variables = 9 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable>
getInputVariablesList();
/**
*
*
* <pre>
* Optional. Output only. Indicates the current input variables deployed by
* the unit
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable input_variables = 9 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable getInputVariables(int index);
/**
*
*
* <pre>
* Optional. Output only. Indicates the current input variables deployed by
* the unit
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable input_variables = 9 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
int getInputVariablesCount();
/**
*
*
* <pre>
* Optional. Output only. Indicates the current input variables deployed by
* the unit
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable input_variables = 9 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<
? extends com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariableOrBuilder>
getInputVariablesOrBuilderList();
/**
*
*
* <pre>
* Optional. Output only. Indicates the current input variables deployed by
* the unit
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable input_variables = 9 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariableOrBuilder
getInputVariablesOrBuilder(int index);
/**
*
*
* <pre>
* Optional. Output only. Set of key/value pairs corresponding to output
* variables from execution of actuation templates. The variables are declared
* in actuation configs (e.g in helm chart or terraform) and the values are
* fetched and returned by the actuation engine upon completion of execution.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable output_variables = 10 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable>
getOutputVariablesList();
/**
*
*
* <pre>
* Optional. Output only. Set of key/value pairs corresponding to output
* variables from execution of actuation templates. The variables are declared
* in actuation configs (e.g in helm chart or terraform) and the values are
* fetched and returned by the actuation engine upon completion of execution.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable output_variables = 10 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable getOutputVariables(int index);
/**
*
*
* <pre>
* Optional. Output only. Set of key/value pairs corresponding to output
* variables from execution of actuation templates. The variables are declared
* in actuation configs (e.g in helm chart or terraform) and the values are
* fetched and returned by the actuation engine upon completion of execution.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable output_variables = 10 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
int getOutputVariablesCount();
/**
*
*
* <pre>
* Optional. Output only. Set of key/value pairs corresponding to output
* variables from execution of actuation templates. The variables are declared
* in actuation configs (e.g in helm chart or terraform) and the values are
* fetched and returned by the actuation engine upon completion of execution.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable output_variables = 10 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<
? extends com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariableOrBuilder>
getOutputVariablesOrBuilderList();
/**
*
*
* <pre>
* Optional. Output only. Set of key/value pairs corresponding to output
* variables from execution of actuation templates. The variables are declared
* in actuation configs (e.g in helm chart or terraform) and the values are
* fetched and returned by the actuation engine upon completion of execution.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariable output_variables = 10 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitVariableOrBuilder
getOutputVariablesOrBuilder(int index);
/**
*
*
* <pre>
* Optional. Captures requested directives for performing future maintenance
* on the unit. This includes a request for the unit to skip maintenance for a
* period of time and remain pinned to its current release as well as controls
* for postponing maintenance scheduled in future.
* </pre>
*
* <code>
* .google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.MaintenanceSettings maintenance = 14 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the maintenance field is set.
*/
boolean hasMaintenance();
/**
*
*
* <pre>
* Optional. Captures requested directives for performing future maintenance
* on the unit. This includes a request for the unit to skip maintenance for a
* period of time and remain pinned to its current release as well as controls
* for postponing maintenance scheduled in future.
* </pre>
*
* <code>
* .google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.MaintenanceSettings maintenance = 14 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The maintenance.
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.MaintenanceSettings getMaintenance();
/**
*
*
* <pre>
* Optional. Captures requested directives for performing future maintenance
* on the unit. This includes a request for the unit to skip maintenance for a
* period of time and remain pinned to its current release as well as controls
* for postponing maintenance scheduled in future.
* </pre>
*
* <code>
* .google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.MaintenanceSettings maintenance = 14 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.MaintenanceSettingsOrBuilder
getMaintenanceOrBuilder();
/**
*
*
* <pre>
* Optional. Output only. Current lifecycle state of the resource (e.g. if
* it's being created or ready to use).
* </pre>
*
* <code>
* .google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.UnitState state = 16 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for state.
*/
int getStateValue();
/**
*
*
* <pre>
* Optional. Output only. Current lifecycle state of the resource (e.g. if
* it's being created or ready to use).
* </pre>
*
* <code>
* .google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.UnitState state = 16 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The state.
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.UnitState getState();
/**
*
*
* <pre>
* Optional. Output only. A set of conditions which indicate the various
* conditions this resource can have.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitCondition conditions = 20 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitCondition>
getConditionsList();
/**
*
*
* <pre>
* Optional. Output only. A set of conditions which indicate the various
* conditions this resource can have.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitCondition conditions = 20 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitCondition getConditions(int index);
/**
*
*
* <pre>
* Optional. Output only. A set of conditions which indicate the various
* conditions this resource can have.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitCondition conditions = 20 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
int getConditionsCount();
/**
*
*
* <pre>
* Optional. Output only. A set of conditions which indicate the various
* conditions this resource can have.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitCondition conditions = 20 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<
? extends com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitConditionOrBuilder>
getConditionsOrBuilderList();
/**
*
*
* <pre>
* Optional. Output only. A set of conditions which indicate the various
* conditions this resource can have.
* </pre>
*
* <code>
* repeated .google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitCondition conditions = 20 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.UnitConditionOrBuilder
getConditionsOrBuilder(int index);
/**
*
*
* <pre>
* Optional. Immutable. Indicates whether the Unit life cycle is controlled
* by the user or by the system.
* Immutable once created.
* </pre>
*
* <code>
* .google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.ManagementMode management_mode = 22 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return The enum numeric value on the wire for managementMode.
*/
int getManagementModeValue();
/**
*
*
* <pre>
* Optional. Immutable. Indicates whether the Unit life cycle is controlled
* by the user or by the system.
* Immutable once created.
* </pre>
*
* <code>
* .google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.ManagementMode management_mode = 22 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return The managementMode.
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.ManagementMode getManagementMode();
/**
*
*
* <pre>
* Optional. Output only. Indicates the system managed state of the unit.
* </pre>
*
* <code>
* .google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.SystemManagedState system_managed_state = 25 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for systemManagedState.
*/
int getSystemManagedStateValue();
/**
*
*
* <pre>
* Optional. Output only. Indicates the system managed state of the unit.
* </pre>
*
* <code>
* .google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.SystemManagedState system_managed_state = 25 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The systemManagedState.
*/
com.google.cloud.saasplatform.saasservicemgmt.v1beta1.Unit.SystemManagedState
getSystemManagedState();
/**
*
*
* <pre>
* Optional. Output only. If set, indicates the time when the system will
* start removing the unit.
* </pre>
*
* <code>
* .google.protobuf.Timestamp system_cleanup_at = 26 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the systemCleanupAt field is set.
*/
boolean hasSystemCleanupAt();
/**
*
*
* <pre>
* Optional. Output only. If set, indicates the time when the system will
* start removing the unit.
* </pre>
*
* <code>
* .google.protobuf.Timestamp system_cleanup_at = 26 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The systemCleanupAt.
*/
com.google.protobuf.Timestamp getSystemCleanupAt();
/**
*
*
* <pre>
* Optional. Output only. If set, indicates the time when the system will
* start removing the unit.
* </pre>
*
* <code>
* .google.protobuf.Timestamp system_cleanup_at = 26 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.protobuf.TimestampOrBuilder getSystemCleanupAtOrBuilder();
/**
*
*
* <pre>
* Optional. The labels on the resource, which can be used for categorization.
* similar to Kubernetes resource labels.
* </pre>
*
* <code>map<string, string> labels = 10401 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
int getLabelsCount();
/**
*
*
* <pre>
* Optional. The labels on the resource, which can be used for categorization.
* similar to Kubernetes resource labels.
* </pre>
*
* <code>map<string, string> labels = 10401 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
boolean containsLabels(java.lang.String key);
/** Use {@link #getLabelsMap()} instead. */
@java.lang.Deprecated
java.util.Map<java.lang.String, java.lang.String> getLabels();
/**
*
*
* <pre>
* Optional. The labels on the resource, which can be used for categorization.
* similar to Kubernetes resource labels.
* </pre>
*
* <code>map<string, string> labels = 10401 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.Map<java.lang.String, java.lang.String> getLabelsMap();
/**
*
*
* <pre>
* Optional. The labels on the resource, which can be used for categorization.
* similar to Kubernetes resource labels.
* </pre>
*
* <code>map<string, string> labels = 10401 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
/* nullable */
java.lang.String getLabelsOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue);
/**
*
*
* <pre>
* Optional. The labels on the resource, which can be used for categorization.
* similar to Kubernetes resource labels.
* </pre>
*
* <code>map<string, string> labels = 10401 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.lang.String getLabelsOrThrow(java.lang.String key);
/**
*
*
* <pre>
* Optional. Annotations is an unstructured key-value map stored with a
* resource that may be set by external tools to store and retrieve arbitrary
* metadata. They are not queryable and should be preserved when modifying
* objects.
*
* More info: https://kubernetes.io/docs/user-guide/annotations
* </pre>
*
* <code>map<string, string> annotations = 10402 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
int getAnnotationsCount();
/**
*
*
* <pre>
* Optional. Annotations is an unstructured key-value map stored with a
* resource that may be set by external tools to store and retrieve arbitrary
* metadata. They are not queryable and should be preserved when modifying
* objects.
*
* More info: https://kubernetes.io/docs/user-guide/annotations
* </pre>
*
* <code>map<string, string> annotations = 10402 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
boolean containsAnnotations(java.lang.String key);
/** Use {@link #getAnnotationsMap()} instead. */
@java.lang.Deprecated
java.util.Map<java.lang.String, java.lang.String> getAnnotations();
/**
*
*
* <pre>
* Optional. Annotations is an unstructured key-value map stored with a
* resource that may be set by external tools to store and retrieve arbitrary
* metadata. They are not queryable and should be preserved when modifying
* objects.
*
* More info: https://kubernetes.io/docs/user-guide/annotations
* </pre>
*
* <code>map<string, string> annotations = 10402 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.Map<java.lang.String, java.lang.String> getAnnotationsMap();
/**
*
*
* <pre>
* Optional. Annotations is an unstructured key-value map stored with a
* resource that may be set by external tools to store and retrieve arbitrary
* metadata. They are not queryable and should be preserved when modifying
* objects.
*
* More info: https://kubernetes.io/docs/user-guide/annotations
* </pre>
*
* <code>map<string, string> annotations = 10402 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
/* nullable */
java.lang.String getAnnotationsOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue);
/**
*
*
* <pre>
* Optional. Annotations is an unstructured key-value map stored with a
* resource that may be set by external tools to store and retrieve arbitrary
* metadata. They are not queryable and should be preserved when modifying
* objects.
*
* More info: https://kubernetes.io/docs/user-guide/annotations
* </pre>
*
* <code>map<string, string> annotations = 10402 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.lang.String getAnnotationsOrThrow(java.lang.String key);
/**
*
*
* <pre>
* Output only. The unique identifier of the resource. UID is unique in the
* time and space for this resource within the scope of the service. It is
* typically generated by the server on successful creation of a resource
* and must not be changed. UID is used to uniquely identify resources
* with resource name reuses. This should be a UUID4.
* </pre>
*
* <code>
* string uid = 10201 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_info) = { ... }
* </code>
*
* @return The uid.
*/
java.lang.String getUid();
/**
*
*
* <pre>
* Output only. The unique identifier of the resource. UID is unique in the
* time and space for this resource within the scope of the service. It is
* typically generated by the server on successful creation of a resource
* and must not be changed. UID is used to uniquely identify resources
* with resource name reuses. This should be a UUID4.
* </pre>
*
* <code>
* string uid = 10201 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_info) = { ... }
* </code>
*
* @return The bytes for uid.
*/
com.google.protobuf.ByteString getUidBytes();
/**
*
*
* <pre>
* Output only. An opaque value that uniquely identifies a version or
* generation of a resource. It can be used to confirm that the client
* and server agree on the ordering of a resource being written.
* </pre>
*
* <code>string etag = 10202 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The etag.
*/
java.lang.String getEtag();
/**
*
*
* <pre>
* Output only. An opaque value that uniquely identifies a version or
* generation of a resource. It can be used to confirm that the client
* and server agree on the ordering of a resource being written.
* </pre>
*
* <code>string etag = 10202 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for etag.
*/
com.google.protobuf.ByteString getEtagBytes();
/**
*
*
* <pre>
* Output only. The timestamp when the resource was created.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 10303 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the createTime field is set.
*/
boolean hasCreateTime();
/**
*
*
* <pre>
* Output only. The timestamp when the resource was created.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 10303 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The createTime.
*/
com.google.protobuf.Timestamp getCreateTime();
/**
*
*
* <pre>
* Output only. The timestamp when the resource was created.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 10303 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder();
/**
*
*
* <pre>
* Output only. The timestamp when the resource was last updated. Any
* change to the resource made by users must refresh this value.
* Changes to a resource made by the service should refresh this value.
* </pre>
*
* <code>
* .google.protobuf.Timestamp update_time = 10304 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the updateTime field is set.
*/
boolean hasUpdateTime();
/**
*
*
* <pre>
* Output only. The timestamp when the resource was last updated. Any
* change to the resource made by users must refresh this value.
* Changes to a resource made by the service should refresh this value.
* </pre>
*
* <code>
* .google.protobuf.Timestamp update_time = 10304 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The updateTime.
*/
com.google.protobuf.Timestamp getUpdateTime();
/**
*
*
* <pre>
* Output only. The timestamp when the resource was last updated. Any
* change to the resource made by users must refresh this value.
* Changes to a resource made by the service should refresh this value.
* </pre>
*
* <code>
* .google.protobuf.Timestamp update_time = 10304 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder();
}
|
apache/solr | 37,968 | solr/solrj/src/java/org/noggit/JSONParser.java | /*
* Copyright 2006- Yonik Seeley
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.noggit;
import java.io.IOException;
import java.io.Reader;
public class JSONParser {
/** Event indicating a JSON string value, including member names of objects */
public static final int STRING = 1;
/** Event indicating a JSON number value which fits into a signed 64-bit integer */
public static final int LONG = 2;
/**
* Event indicating a JSON number value which has a fractional part or an exponent and with string
* length <= 23 chars not including sign. This covers all representations of normal values for
* Double.toString().
*/
public static final int NUMBER = 3;
/**
* Event indicating a JSON number value that was not produced by toString of any Java primitive
* numerics such as Double or Long. It is either an integer outside the range of a 64-bit signed
* integer, or a floating point value with a string representation of more than 23 chars.
*/
public static final int BIGNUMBER = 4;
/** Event indicating a JSON boolean */
public static final int BOOLEAN = 5;
/** Event indicating a JSON null */
public static final int NULL = 6;
/** Event indicating the start of a JSON object */
public static final int OBJECT_START = 7;
/** Event indicating the end of a JSON object */
public static final int OBJECT_END = 8;
/** Event indicating the start of a JSON array */
public static final int ARRAY_START = 9;
/** Event indicating the end of a JSON array */
public static final int ARRAY_END = 10;
/** Event indicating the end of input has been reached */
public static final int EOF = 11;
/** Flags to control parsing behavior */
public static final int ALLOW_COMMENTS = 1 << 0;
public static final int ALLOW_SINGLE_QUOTES = 1 << 1;
public static final int ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER = 1 << 2;
public static final int ALLOW_UNQUOTED_KEYS = 1 << 3;
public static final int ALLOW_UNQUOTED_STRING_VALUES = 1 << 4;
/**
* ALLOW_EXTRA_COMMAS causes any number of extra commas in arrays and objects to be ignored Note
* that a trailing comma in [] would be [,], hence calling the feature "trailing" commas is either
* limiting or misleading. Since trailing commas are fundamentally incompatible with any future
* "fill-in-missing-values-with-null", it was decided to extend this feature to handle any number
* of extra commas.
*/
public static final int ALLOW_EXTRA_COMMAS = 1 << 5;
public static final int ALLOW_MISSING_COLON_COMMA_BEFORE_OBJECT = 1 << 6;
public static final int OPTIONAL_OUTER_BRACES = 1 << 7;
public static final int FLAGS_STRICT = 0;
public static final int FLAGS_DEFAULT =
ALLOW_COMMENTS
| ALLOW_SINGLE_QUOTES
| ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER
| ALLOW_UNQUOTED_KEYS
| ALLOW_UNQUOTED_STRING_VALUES
| ALLOW_EXTRA_COMMAS;
public static class ParseException extends RuntimeException {
public ParseException(String msg) {
super(msg);
}
}
public static String getEventString(int e) {
switch (e) {
case STRING:
return "STRING";
case LONG:
return "LONG";
case NUMBER:
return "NUMBER";
case BIGNUMBER:
return "BIGNUMBER";
case BOOLEAN:
return "BOOLEAN";
case NULL:
return "NULL";
case OBJECT_START:
return "OBJECT_START";
case OBJECT_END:
return "OBJECT_END";
case ARRAY_START:
return "ARRAY_START";
case ARRAY_END:
return "ARRAY_END";
case EOF:
return "EOF";
}
return "Unknown: " + e;
}
  // Sink for characters of values the caller skips over without reading.
  private static final CharArr devNull = new CharArr.NullCharArr();
  protected int flags = FLAGS_DEFAULT; // leniency flags (bit set of the ALLOW_* constants)
  protected final char[] buf; // input buffer with JSON text in it
  protected int start; // current position in the buffer
  protected int end; // end position in the buffer (one past last valid index)
  protected final Reader in; // optional reader to obtain data from
  protected boolean eof = false; // true if the end of the stream was reached.
  protected long gpos; // global position = gpos + start
  protected int event; // last event read
  // The terminator for the last string we read: single quote, double quote, or 0 for unterminated.
  protected int stringTerm;
  // NOTE(review): presumably tracks an implicit '{' synthesized under OPTIONAL_OUTER_BRACES —
  // the code that sets it is outside this chunk; confirm before relying on it.
  protected boolean missingOpeningBrace = false;
  /** Parses JSON read incrementally from {@code in}, using an internal 8K buffer. */
  public JSONParser(Reader in) {
    this(in, new char[8192]);
    // 8192 matches the default buffer size of a BufferedReader so double
    // buffering of the data is avoided.
  }

  /** Parses JSON read incrementally from {@code in}, buffering through the caller's array. */
  public JSONParser(Reader in, char[] buffer) {
    this.in = in;
    this.buf = buffer;
  }

  // idea - if someone passes us a CharArrayReader, we could
  // directly use that buffer as it's protected.

  /** Parses JSON directly from {@code data[start..end)}; no Reader or copying involved. */
  public JSONParser(char[] data, int start, int end) {
    this.in = null;
    this.buf = data;
    this.start = start;
    this.end = end;
  }

  /** Parses JSON from the entire string. */
  public JSONParser(String data) {
    this(data, 0, data.length());
  }

  /** Parses JSON from {@code data[start..end)}, copying that region into a private buffer. */
  public JSONParser(String data, int start, int end) {
    this.in = null;
    this.start = start;
    this.end = end;
    // NOTE(review): the region is copied to buf[0..end-start) but this.start stays at `start`,
    // so parsing would read the wrong slice when start > 0 — the in-file caller always passes
    // start == 0; confirm no external caller uses a non-zero start.
    this.buf = new char[end - start];
    data.getChars(start, end, buf, 0);
  }
public int getFlags() {
return flags;
}
public int setFlags(int flags) {
int oldFlags = flags;
this.flags = flags;
return oldFlags;
}
  // temporary output buffer, reused across values to avoid per-value allocation
  private final CharArr out = new CharArr(64);
  // We need to keep some state in order to (at a minimum) know if
  // we should skip ',' or ':'.
  private byte[] stack = new byte[16]; // grown by doubling in push() as nesting deepens
  private int ptr = 0; // pointer into the stack of parser states
  private byte state = 0; // current parser state
  // parser states stored in the stack
  private static final byte DID_OBJSTART = 1; // '{' just read
  private static final byte DID_ARRSTART = 2; // '[' just read
  private static final byte DID_ARRELEM = 3; // array element just read
  private static final byte DID_MEMNAME = 4; // object member name (map key) just read
  private static final byte DID_MEMVAL = 5; // object member value (map val) just read
  // info about value that was just read (or is in the middle of being read)
  private int valstate;
// push current parser state (use at start of new container)
private final void push() {
if (ptr >= stack.length) {
// doubling here is probably overkill, but anything that needs to double more than
// once (32 levels deep) is very atypical anyway.
byte[] newstack = new byte[stack.length << 1];
System.arraycopy(stack, 0, newstack, 0, stack.length);
stack = newstack;
}
stack[ptr++] = state;
}
// pop parser state (use at end of container)
private final void pop() {
if (--ptr < 0) {
throw err("Unbalanced container");
} else {
state = stack[ptr];
}
}
  /**
   * Refills {@link #buf} from the Reader (when one exists) and resets {@link #start}.
   * Sets {@link #eof} once no further characters are available.
   */
  protected void fill() throws IOException {
    if (in != null) {
      gpos += end; // consumed chars roll into the global position before start resets
      start = 0;
      int num = in.read(buf, 0, buf.length);
      end = num >= 0 ? num : 0;
    }
    if (start >= end) eof = true;
  }
private void getMore() throws IOException {
fill();
if (start >= end) {
throw err(null);
}
}
  /** Returns the next input character, refilling the buffer as needed, or -1 at end of input. */
  protected int getChar() throws IOException {
    if (start >= end) {
      fill();
      if (start >= end) return -1;
    }
    return buf[start++];
  }
/**
* Returns true if the given character is considered to be whitespace. One difference between
* Java's Character.isWhitespace() is that this method considers a hard space (non-breaking space,
* or nbsp) to be whitespace.
*/
protected static final boolean isWhitespace(int ch) {
return (Character.isWhitespace(ch) || ch == 0x00a0);
}
  // Bit set over char codes 0..63 of characters to skip while scanning for the next
  // significant character: ASCII whitespace plus the comment starters '#' and '/'.
  private static final long WS_MASK =
      (1L << ' ')
          | (1L << '\t')
          | (1L << '\r')
          | (1L << '\n')
          | (1L << '#')
          | (1L << '/')
          | (0x01); // set 1 bit so 0xA0 will be flagged as whitespace
  /**
   * Returns the next character that is neither whitespace nor part of a comment ('#' or '/'
   * styles), or -1 at end of input.
   */
  protected int getCharNWS() throws IOException {
    for (; ; ) {
      int ch = getChar();
      // getCharNWS is normally called in the context of expecting certain JSON special characters
      // such as ":}"]," all of these characters are below 64, including comment chars '/' and '#',
      // so we can make this the fast path even w/o checking the range first. We'll only get some
      // false-positives while using bare strings (chars "IJMc")
      if (((WS_MASK >> ch) & 0x01) == 0) {
        return ch;
      } else //noinspection StatementWithEmptyBody
      if (ch <= ' ') {
        // this will only be true if one of the whitespace bits was set
      } else if (ch == '/') {
        getSlashComment();
      } else if (ch == '#') {
        getNewlineComment();
      } else if (!isWhitespace(ch)) {
        // we'll only reach here with certain bare strings, errors, or strange whitespace like 0xa0
        return ch;
      }
    }
  }
  /**
   * Like {@link #getCharNWS()}, but first considers {@code ch}, a character that has already been
   * read. Returns the first non-whitespace, non-comment character (possibly {@code ch} itself).
   */
  protected int getCharNWS(int ch) throws IOException {
    for (; ; ) {
      // getCharNWS is normally called in the context of expecting certain JSON special characters
      // such as ":}"]," all of these characters are below 64, including comment chars '/' and '#',
      // so we can make this the fast path even w/o checking the range first. We'll only get some
      // false-positives while using bare strings (chars "IJMc")
      if (((WS_MASK >> ch) & 0x01) == 0) {
        return ch;
      } else if (ch <= ' ') {
        // this will only be true if one of the whitespace bits was set whitespace... get new char
        // at bottom of loop
      } else if (ch == '/') {
        getSlashComment();
      } else if (ch == '#') {
        getNewlineComment();
      } else if (!isWhitespace(ch)) {
        // we'll only reach here with certain bare strings, errors, or strange whitespace like 0xa0
        return ch;
      }
      ch = getChar();
    }
  }
protected int getCharExpected(int expected) throws IOException {
for (; ; ) {
int ch = getChar();
if (ch == expected) return expected;
if (ch == ' ') continue;
return getCharNWS(ch);
}
}
protected void getNewlineComment() throws IOException {
// read a # or a //, so go until newline
for (; ; ) {
int ch = getChar();
// don't worry about DOS /r/n... we'll stop on the \r and let the rest of the whitespace
// eater consume the \n
if (ch == '\n' || ch == '\r' || ch == -1) {
return;
}
}
}
protected void getSlashComment() throws IOException {
int ch = getChar();
if (ch == '/') {
getNewlineComment();
return;
}
if (ch != '*') {
throw err("Invalid comment: expected //, /*, or #");
}
ch = getChar();
for (; ; ) {
if (ch == '*') {
ch = getChar();
if (ch == '/') {
return;
} else if (ch == '*') {
// handle cases of *******/
continue;
}
}
if (ch == -1) {
return;
}
ch = getChar();
}
}
  /**
   * Attempts to match the rest of a bare keyword ({@code true}, {@code false}, {@code null});
   * {@code arr} holds the full keyword and its first character has already been consumed.
   *
   * <p>Returns true on an exact match. With ALLOW_UNQUOTED_STRING_VALUES set, a mismatch (or a
   * keyword that keeps going, like {@code trueX}) is instead treated as the start of a bare
   * string: the prefix read so far is placed in {@link #out}, {@link #stringTerm} is set to 0,
   * and false is returned. Without that flag a mismatch is a parse error.
   */
  protected boolean matchBareWord(char[] arr) throws IOException {
    for (int i = 1; i < arr.length; i++) {
      int ch = getChar();
      if (ch != arr[i]) {
        if ((flags & ALLOW_UNQUOTED_STRING_VALUES) == 0) {
          throw err("Expected " + new String(arr));
        } else {
          stringTerm = 0;
          out.reset();
          out.write(arr, 0, i);
          if (!eof) {
            start--; // push the mismatching char back so the bare-string reader sees it
          }
          return false;
        }
      }
    }
    // if we don't allow bare strings, we don't need to check that the string actually terminates...
    // just let things fail as the parser tries to continue
    if ((flags & ALLOW_UNQUOTED_STRING_VALUES) == 0) {
      return true;
    }
    // check that the string actually terminates... for example trueX should return false
    int ch = getChar();
    if (eof) {
      return true;
    } else if (!isUnquotedStringChar(ch)) {
      start--;
      return true;
    }
    // we encountered something like "trueX" when matching "true"
    stringTerm = 0;
    out.reset();
    out.unsafeWrite(arr, 0, arr.length);
    out.unsafeWrite(ch);
    return false;
  }
  /**
   * Builds (but does not throw) a ParseException describing the current position: the offending
   * character, the global position, and surrounding context. A null {@code msg} selects a generic
   * message depending on whether EOF was reached.
   */
  protected ParseException err(String msg) {
    // We can't tell if EOF was hit by comparing start<=end
    // because the illegal char could have been the last in the buffer
    // or in the stream. To deal with this, the "eof" var was introduced
    if (!eof && start > 0) start--; // backup one char
    String chs = "char=" + ((start >= end) ? "(EOF)" : "" + buf[start]);
    String pos = "position=" + (gpos + start);
    String tot = chs + ',' + pos + getContext();
    if (msg == null) {
      if (start >= end) msg = "Unexpected EOF";
      else msg = "JSON Parse Error";
    }
    return new ParseException(msg + ": " + tot);
  }
private String getContext() {
String context = "";
if (start >= 0) {
context += " AFTER='" + errEscape(Math.max(start - 60, 0), start + 1) + "'";
}
if (start < end) {
context += " BEFORE='" + errEscape(start + 1, start + 40) + "'";
}
return context;
}
private String errEscape(int a, int b) {
b = Math.min(b, end);
if (a >= b) return "";
return new String(buf, a, b - a).replaceAll("\\s+", " ");
}
  private boolean bool; // most recent boolean value read
  private long lval; // most recent long value read (only valid when valstate == LONG)
  private int nstate; // current state while reading a number (bit set of HAS_* flags)
  private static final int HAS_FRACTION = 0x01; // nstate flag, '.' already read
  private static final int HAS_EXPONENT = 0x02; // nstate flag, '[eE][+-]?[0-9]' already read
  /**
   * Reads an integer or floating point number whose first digit has already been consumed,
   * mirroring its textual form into {@link #out} in case it turns out to be NUMBER/BIGNUMBER.
   * Digits accumulate in the negative domain because Long.MIN_VALUE has no positive counterpart.
   *
   * @param firstChar the first numeric digit already read
   * @param isNeg true if a leading '-' was consumed before {@code firstChar}
   * @return the long read... only significant if valstate==LONG after this call
   */
  private long readNumber(int firstChar, boolean isNeg) throws IOException {
    out.unsafeWrite(firstChar); // unsafe OK since we know output is big enough
    // We build up the number in the negative plane since it's larger (by one) than
    // the positive plane.
    long v = (long) '0' - firstChar;
    // can't overflow a long in 18 decimal digits (i.e. 17 additional after the first).
    // we also need 22 additional to handle double, so we'll handle in 2 separate loops.
    int i;
    for (i = 0; i < 17; i++) {
      int ch = getChar();
      // TODO: is this switch faster as an if-then-else?
      switch (ch) {
        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
        case '8':
        case '9':
          v = v * 10 - (ch - '0');
          out.unsafeWrite(ch);
          continue;
        case '.':
          out.unsafeWrite('.');
          valstate = readFrac(out, 22 - i);
          return 0;
        case 'e':
        case 'E':
          out.unsafeWrite(ch);
          nstate = 0;
          valstate = readExp(out, 22 - i);
          return 0;
        default:
          // return the number, relying on nextEvent() to return an error
          // for invalid chars following the number.
          if (ch != -1) --start; // push back last char if not EOF
          valstate = LONG;
          return isNeg ? v : -v;
      }
    }
    // after this, we could overflow a long and need to do extra checking
    boolean overflow = false;
    long maxval = isNeg ? Long.MIN_VALUE : -Long.MAX_VALUE;
    for (; i < 22; i++) {
      int ch = getChar();
      switch (ch) {
        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
        case '8':
        case '9':
          if (v < (0x8000000000000000L / 10))
            overflow = true; // can't multiply by 10 w/o overflowing
          v *= 10;
          int digit = ch - '0';
          if (v < maxval + digit) overflow = true; // can't add digit w/o overflowing
          v -= digit;
          out.unsafeWrite(ch);
          continue;
        case '.':
          out.unsafeWrite('.');
          valstate = readFrac(out, 22 - i);
          return 0;
        case 'e':
        case 'E':
          out.unsafeWrite(ch);
          nstate = 0;
          valstate = readExp(out, 22 - i);
          return 0;
        default:
          // return the number, relying on nextEvent() to return an error
          // for invalid chars following the number.
          if (ch != -1) --start; // push back last char if not EOF
          valstate = overflow ? BIGNUMBER : LONG;
          return isNeg ? v : -v;
      }
    }
    nstate = 0;
    valstate = BIGNUMBER;
    return 0;
  }
  // read digits right of decimal point (the '.' has already been consumed and written to arr).
  // Returns NUMBER when the value terminates, BIGNUMBER once more than `lim` chars are read,
  // or hands off to readExp on an exponent marker.
  private int readFrac(CharArr arr, int lim) throws IOException {
    nstate = HAS_FRACTION; // deliberate set instead of '|'
    while (--lim >= 0) {
      int ch = getChar();
      if (ch >= '0' && ch <= '9') {
        arr.write(ch);
      } else if (ch == 'e' || ch == 'E') {
        arr.write(ch);
        return readExp(arr, lim);
      } else {
        if (ch != -1) start--; // back up
        return NUMBER;
      }
    }
    return BIGNUMBER;
  }
  // call after 'e' or 'E' has been seen to read the rest of the exponent:
  // an optional sign followed by at least one digit.
  private int readExp(CharArr arr, int lim) throws IOException {
    nstate |= HAS_EXPONENT;
    int ch = getChar();
    lim--;
    if (ch == '+' || ch == '-') {
      arr.write(ch);
      ch = getChar();
      lim--;
    }
    // make sure at least one digit is read.
    if (ch < '0' || ch > '9') {
      throw err("missing exponent number");
    }
    arr.write(ch);
    return readExpDigits(arr, lim);
  }
// continuation of readExpStart
private int readExpDigits(CharArr arr, int lim) throws IOException {
while (--lim >= 0) {
int ch = getChar();
if (ch >= '0' && ch <= '9') {
arr.write(ch);
} else {
if (ch != -1) start--; // back up
return NUMBER;
}
}
return BIGNUMBER;
}
  /**
   * Continues reading a BIGNUMBER into {@code arr}; {@link #out} holds the prefix read so far and
   * {@link #nstate} records whether a fraction and/or exponent has already been started.
   */
  private void continueNumber(CharArr arr) throws IOException {
    if (arr != out) arr.write(out);
    if ((nstate & HAS_EXPONENT) != 0) {
      readExpDigits(arr, Integer.MAX_VALUE);
      return;
    }
    if (nstate != 0) {
      readFrac(arr, Integer.MAX_VALUE);
      return;
    }
    for (; ; ) {
      int ch = getChar();
      if (ch >= '0' && ch <= '9') {
        arr.write(ch);
      } else if (ch == '.') {
        arr.write(ch);
        readFrac(arr, Integer.MAX_VALUE);
        return;
      } else if (ch == 'e' || ch == 'E') {
        arr.write(ch);
        readExp(arr, Integer.MAX_VALUE);
        return;
      } else {
        if (ch != -1) start--; // push back the terminating character
        return;
      }
    }
  }
private int hexval(int hexdig) {
if (hexdig >= '0' && hexdig <= '9') {
return hexdig - '0';
} else if (hexdig >= 'A' && hexdig <= 'F') {
return hexdig + (10 - 'A');
} else if (hexdig >= 'a' && hexdig <= 'f') {
return hexdig + (10 - 'a');
}
throw err("invalid hex digit");
}
// backslash has already been read when this is called
private char readEscapedChar() throws IOException {
int ch = getChar();
switch (ch) {
case '"':
return '"';
case '\'':
return '\'';
case '\\':
return '\\';
case '/':
return '/';
case 'n':
return '\n';
case 'r':
return '\r';
case 't':
return '\t';
case 'f':
return '\f';
case 'b':
return '\b';
case 'u':
return (char)
((hexval(getChar()) << 12)
| (hexval(getChar()) << 8)
| (hexval(getChar()) << 4)
| (hexval(getChar())));
}
if ((flags & ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER) != 0 && ch != EOF) {
return (char) ch;
}
throw err("Invalid character escape");
}
  // a dummy buffer we can use to point at other buffers
  private final CharArr tmp = new CharArr(null, 0, 0);
  /**
   * Reads the remainder of the current string value. When the whole string sits in the input
   * buffer with no escapes, {@link #tmp} is aimed directly at {@link #buf} to avoid a copy — the
   * returned CharArr is then only valid until the next read. Otherwise the unescaped characters
   * are accumulated in {@link #out}.
   */
  private CharArr readStringChars() throws IOException {
    if (stringTerm == 0) {
      // "out" will already contain the first part of the bare string, so don't reset it
      readStringBare(out);
      return out;
    }
    char terminator = (char) stringTerm;
    int i;
    for (i = start; i < end; i++) {
      char c = buf[i];
      if (c == terminator) {
        tmp.set(buf, start, i); // directly use input buffer
        start = i + 1; // advance past last '"'
        return tmp;
      } else if (c == '\\') {
        break;
      }
    }
    out.reset();
    readStringChars2(out, i);
    return out;
  }
  // middle is the pointer to the middle of a buffer to start scanning for a non-string
  // character ('"' or "/"). start<=middle<end
  // this should be faster for strings with fewer escapes, but probably slower for many escapes.
  // Copies the unescaped remainder of the string into arr; buf[start..middle) has already been
  // verified to contain neither the terminator nor a backslash.
  private void readStringChars2(CharArr arr, int middle) throws IOException {
    if (stringTerm == 0) {
      readStringBare(arr);
      return;
    }
    char terminator = (char) stringTerm;
    for (; ; ) {
      if (middle >= end) {
        // buffer exhausted: flush what we have and refill
        arr.write(buf, start, middle - start);
        start = middle;
        getMore();
        middle = start;
      }
      int ch = buf[middle++];
      if (ch == terminator) {
        int len = middle - start - 1;
        if (len > 0) arr.write(buf, start, len);
        start = middle;
        return;
      } else if (ch == '\\') {
        int len = middle - start - 1;
        if (len > 0) arr.write(buf, start, len);
        start = middle;
        arr.write(readEscapedChar());
        middle = start;
      }
    }
  }
private void readStringBare(CharArr arr) throws IOException {
if (arr != out) {
arr.append(out);
}
for (; ; ) {
int ch = getChar();
if (!isUnquotedStringChar(ch)) {
if (ch == -1) break;
if (ch == '\\') {
arr.write(readEscapedChar());
continue;
}
start--;
break;
}
if (ch == '\\') {
arr.write(readEscapedChar());
continue;
}
arr.write(ch);
}
}
  /**
   * Classifies a string that did not start with '"': either a single-quoted string (sets
   * {@link #stringTerm} to '\'') or the start of a bare string (sets {@link #stringTerm} to 0 and
   * seeds {@link #out} with {@code ch}). Throws unless the corresponding ALLOW_* flag permits it.
   *
   * @param isName true if this is a field name (as opposed to a value)
   */
  protected void handleNonDoubleQuoteString(int ch, boolean isName) throws IOException {
    if (ch == '\'') {
      stringTerm = ch;
      if ((flags & ALLOW_SINGLE_QUOTES) == 0) {
        throw err("Single quoted strings not allowed");
      }
    } else {
      if ((isName && (flags & ALLOW_UNQUOTED_KEYS) == 0)
          || (!isName && (flags & ALLOW_UNQUOTED_STRING_VALUES) == 0)
          || eof) {
        if (isName) {
          throw err("Expected quoted string");
        } else {
          throw err(null);
        }
      }
      if (!isUnquotedStringStart(ch)) {
        throw err(null);
      }
      stringTerm = 0; // signal for unquoted string
      out.reset();
      out.unsafeWrite(ch);
    }
  }
  // What characters may begin an unquoted (bare) string: Java identifier starts, which also
  // covers the bare keywords true/false/null.
  private static boolean isUnquotedStringStart(int ch) {
    return Character.isJavaIdentifierStart(ch);
  }
  // What characters are allowed to continue an unquoted string
  // once we know we are in one.
  private static boolean isUnquotedStringChar(int ch) {
    // Java identifier parts, plus separators that commonly appear in bare tokens
    // (dotted names, negatives, paths). Returns false for -1 (EOF) and for '\\'.
    return Character.isJavaIdentifierPart(ch) || ch == '.' || ch == '-' || ch == '/';
    // would checking for a-z first speed up the common case?
  }
/* alternate implementation
// middle is the pointer to the middle of a buffer to start scanning for a non-string
// character ('"' or "/"). start<=middle<end
private void readStringChars2a(CharArr arr, int middle) throws IOException {
int ch = 0;
for (; ; ) {
// find the next non-string char
for (; middle < end; middle++) {
ch = buf[middle];
if (ch == '"' || ch == '\\') break;
}
arr.write(buf, start, middle - start);
if (middle >= end) {
getMore();
middle = start;
} else {
start = middle + 1; // set buffer pointer to correct spot
if (ch == '"') {
valstate = 0;
return;
} else if (ch == '\\') {
arr.write(readEscapedChar());
if (start >= end) getMore();
middle = start;
}
}
}
}
*/
// return the next event when parser is in a neutral state (no
// map separators or array element separators to read
private int next(int ch) throws IOException {
// TODO: try my own form of indirect jump... look up char class and index directly into handling
// implementation?
for (; ; ) {
switch (ch) {
// this is not the exclusive list of whitespace chars... the rest are handled in default:
case ' ':
case '\t':
case '\r':
case '\n':
// calling getCharNWS here seems faster than letting the switch handle it
ch = getCharNWS();
break;
case '"':
stringTerm = '"';
valstate = STRING;
return STRING;
case '\'':
if ((flags & ALLOW_SINGLE_QUOTES) == 0) {
throw err("Single quoted strings not allowed");
}
stringTerm = '\'';
valstate = STRING;
return STRING;
case '{':
push();
state = DID_OBJSTART;
return OBJECT_START;
case '[':
push();
state = DID_ARRSTART;
return ARRAY_START;
case '0':
out.reset();
// special case '0'? If next char isn't '.' val=0
ch = getChar();
if (ch == '.') {
start--;
ch = '0';
readNumber('0', false);
return valstate;
} else if (ch > '9' || ch < '0') {
out.unsafeWrite('0');
if (ch != -1) start--;
lval = 0;
valstate = LONG;
return LONG;
} else {
throw err("Leading zeros not allowed");
}
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
out.reset();
lval = readNumber(ch, false);
return valstate;
case '-':
out.reset();
out.unsafeWrite('-');
ch = getChar();
if (ch < '0' || ch > '9') throw err("expected digit after '-'");
lval = readNumber(ch, true);
return valstate;
case 't':
// TODO: test performance of this non-branching inline version.
// if ((('r'-getChar())|('u'-getChar())|('e'-getChar())) != 0) throw err("");
if (matchBareWord(JSONUtil.TRUE_CHARS)) {
bool = true;
valstate = BOOLEAN;
return valstate;
} else {
valstate = STRING;
return STRING;
}
case 'f':
if (matchBareWord(JSONUtil.FALSE_CHARS)) {
bool = false;
valstate = BOOLEAN;
return valstate;
} else {
valstate = STRING;
return STRING;
}
case 'n':
if (matchBareWord(JSONUtil.NULL_CHARS)) {
valstate = NULL;
return valstate;
} else {
valstate = STRING;
return STRING;
}
case '/':
getSlashComment();
ch = getChar();
break;
case '#':
getNewlineComment();
ch = getChar();
break;
case ']': // This only happens with a trailing comma (or an error)
if (state != DID_ARRELEM || (flags & ALLOW_EXTRA_COMMAS) == 0) {
throw err("Unexpected array closer ]");
}
pop();
return event = ARRAY_END;
case '}': // This only happens with a trailing comma (or an error)
if (state != DID_MEMVAL || (flags & ALLOW_EXTRA_COMMAS) == 0) {
throw err("Unexpected object closer }");
}
pop();
return event = ARRAY_END;
case ',': // This only happens with input like [1,]
if ((state != DID_ARRELEM && state != DID_MEMVAL) || (flags & ALLOW_EXTRA_COMMAS) == 0) {
throw err("Unexpected comma");
}
ch = getChar();
break;
case -1:
if (getLevel() > 0) throw err("Premature EOF");
return EOF;
default:
// Handle unusual unicode whitespace like no-break space (0xA0)
if (isWhitespace(ch)) {
ch = getChar(); // getCharNWS() would also work
break;
}
handleNonDoubleQuoteString(ch, false);
valstate = STRING;
return STRING;
// throw err(null);
}
}
}
@Override
public String toString() {
return "start=" + start + ",end=" + end + ",state=" + state + "valstate=" + valstate;
}
  /**
   * Returns the next event encountered in the JSON stream, one of
   *
   * <ul>
   *   <li>{@link #STRING}
   *   <li>{@link #LONG}
   *   <li>{@link #NUMBER}
   *   <li>{@link #BIGNUMBER}
   *   <li>{@link #BOOLEAN}
   *   <li>{@link #NULL}
   *   <li>{@link #OBJECT_START}
   *   <li>{@link #OBJECT_END}
   *   <li>{@link #ARRAY_START}
   *   <li>{@link #ARRAY_END}
   *   <li>{@link #EOF}
   * </ul>
   */
  public int nextEvent() throws IOException {
    // If the caller never consumed the previous value, skip over its remaining
    // characters first so the parser is positioned at the next token.
    if (valstate != 0) {
      if (valstate == STRING) {
        readStringChars2(devNull, start);
      } else if (valstate == BIGNUMBER) {
        continueNumber(devNull);
      }
      valstate = 0;
    }
    int ch;
    outer:
    for (; ; ) {
      switch (state) {
        case 0:
          event = next(getChar());
          // OPTIONAL_OUTER_BRACES: a bare top-level key is re-parsed as if the
          // input had started with '{'.
          if (event == STRING && (flags & OPTIONAL_OUTER_BRACES) != 0) {
            if (start > 0) start--;
            missingOpeningBrace = true;
            stringTerm = 0;
            valstate = 0;
            event = next('{');
          }
          return event;
        case DID_OBJSTART:
          ch = getCharExpected('"');
          if (ch == '}') {
            pop();
            return event = OBJECT_END;
          }
          if (ch == '"') {
            stringTerm = ch;
          } else if (ch == ',' && (flags & ALLOW_EXTRA_COMMAS) != 0) {
            continue outer;
          } else {
            handleNonDoubleQuoteString(ch, true);
          }
          state = DID_MEMNAME;
          valstate = STRING;
          return event = STRING;
        case DID_MEMNAME:
          ch = getCharExpected(':');
          if (ch != ':') {
            if ((ch == '{' || ch == '[')
                && (flags & ALLOW_MISSING_COLON_COMMA_BEFORE_OBJECT) != 0) {
              start--;
            } else {
              throw err("Expected key,value separator ':'");
            }
          }
          state = DID_MEMVAL; // set state first because it might be pushed...
          return event = next(getChar());
        case DID_MEMVAL:
          ch = getCharExpected(',');
          if (ch == '}') {
            pop();
            return event = OBJECT_END;
          } else if (ch != ',') {
            if ((flags & ALLOW_EXTRA_COMMAS) != 0
                && (ch == '\'' || ch == '"' || Character.isLetter(ch))) {
              start--;
            } else if (missingOpeningBrace && ch == -1 && (flags & OPTIONAL_OUTER_BRACES) != 0) {
              // EOF closes the implicit outer object we synthesized above.
              missingOpeningBrace = false;
              pop();
              return event = OBJECT_END;
            } else throw err("Expected ',' or '}'");
          }
          ch = getCharExpected('"');
          if (ch == '"') {
            stringTerm = ch;
          } else if ((ch == ',' || ch == '}') && (flags & ALLOW_EXTRA_COMMAS) != 0) {
            if (ch == ',') continue outer;
            pop();
            return event = OBJECT_END;
          } else {
            handleNonDoubleQuoteString(ch, true);
          }
          state = DID_MEMNAME;
          valstate = STRING;
          return event = STRING;
        case DID_ARRSTART:
          ch = getCharNWS();
          if (ch == ']') {
            pop();
            return event = ARRAY_END;
          }
          state = DID_ARRELEM; // set state first, might be pushed...
          return event = next(ch);
        case DID_ARRELEM:
          ch = getCharExpected(',');
          if (ch == ',') {
            // state = DID_ARRELEM; // redundant
            return event = next(getChar());
          } else if (ch == ']') {
            pop();
            return event = ARRAY_END;
          } else {
            if ((ch == '{' || ch == '[')
                && (flags & ALLOW_MISSING_COLON_COMMA_BEFORE_OBJECT) != 0) {
              return event = next(ch);
            } else {
              throw err("Expected ',' or ']'");
            }
          }
      }
    } // end for(;;)
  }
public int lastEvent() {
return event;
}
public boolean wasKey() {
return state == DID_MEMNAME;
}
private void goTo(int what) throws IOException {
if (valstate == what) {
valstate = 0;
return;
}
if (valstate == 0) {
/*int ev = */ nextEvent(); // TODO
if (valstate != what) {
throw err("type mismatch");
}
valstate = 0;
} else {
throw err("type mismatch");
}
}
/** Returns the JSON string value, decoding any escaped characters. */
public String getString() throws IOException {
return getStringChars().toString();
}
/**
* Returns the characters of a JSON string value, decoding any escaped characters. The underlying
* buffer of the returned <code>CharArr</code> should *not* be modified as it may be shared with
* the input buffer. The returned <code>CharArr</code> will only be valid up until the next
* JSONParser method is called. Any required data should be read before that point.
*/
public CharArr getStringChars() throws IOException {
goTo(STRING);
return readStringChars();
}
/** Reads a JSON string into the output, decoding any escaped characters. */
public void getString(CharArr output) throws IOException {
goTo(STRING);
readStringChars2(output, start);
}
/**
* Reads a number from the input stream and parses it as a long, only if the value will in fact
* fit into a signed 64-bit integer.
*/
public long getLong() throws IOException {
goTo(LONG);
return lval;
}
/** Reads a number from the input stream and parses it as a double */
public double getDouble() throws IOException {
return Double.parseDouble(getNumberChars().toString());
}
/**
* Returns the characters of a JSON numeric value.
*
* <p>The underlying buffer of the returned <code>CharArr</code> should *not* be modified as it
* may be shared with the input buffer.
*
* <p>The returned <code>CharArr</code> will only be valid up until the next JSONParser method is
* called. Any required data should be read before that point.
*/
public CharArr getNumberChars() throws IOException {
int ev = 0;
if (valstate == 0) ev = nextEvent();
if (valstate == LONG || valstate == NUMBER) {
valstate = 0;
return out;
} else if (valstate == BIGNUMBER) {
continueNumber(out);
valstate = 0;
return out;
} else {
throw err("Unexpected " + ev);
}
}
/** Reads a JSON numeric value into the output. */
public void getNumberChars(CharArr output) throws IOException {
int ev = 0;
if (valstate == 0) ev = nextEvent();
if (valstate == LONG || valstate == NUMBER) output.write(this.out);
else if (valstate == BIGNUMBER) {
continueNumber(output);
} else {
throw err("Unexpected " + ev);
}
valstate = 0;
}
/** Reads a boolean value */
public boolean getBoolean() throws IOException {
goTo(BOOLEAN);
return bool;
}
/** Reads a null value */
public void getNull() throws IOException {
goTo(NULL);
}
/**
* @return the current nesting level, the number of parent objects or arrays.
*/
public int getLevel() {
return ptr;
}
public long getPosition() {
return gpos + start;
}
}
|
googleapis/google-cloud-java | 38,152 | java-netapp/proto-google-cloud-netapp-v1/src/main/java/com/google/cloud/netapp/v1/CreateActiveDirectoryRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/netapp/v1/active_directory.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.netapp.v1;
/**
*
*
* <pre>
* CreateActiveDirectoryRequest for creating an active directory.
* </pre>
*
* Protobuf type {@code google.cloud.netapp.v1.CreateActiveDirectoryRequest}
*/
public final class CreateActiveDirectoryRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.netapp.v1.CreateActiveDirectoryRequest)
CreateActiveDirectoryRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateActiveDirectoryRequest.newBuilder() to construct.
private CreateActiveDirectoryRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateActiveDirectoryRequest() {
parent_ = "";
activeDirectoryId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateActiveDirectoryRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.netapp.v1.ActiveDirectoryProto
.internal_static_google_cloud_netapp_v1_CreateActiveDirectoryRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.netapp.v1.ActiveDirectoryProto
.internal_static_google_cloud_netapp_v1_CreateActiveDirectoryRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.netapp.v1.CreateActiveDirectoryRequest.class,
com.google.cloud.netapp.v1.CreateActiveDirectoryRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ACTIVE_DIRECTORY_FIELD_NUMBER = 2;
private com.google.cloud.netapp.v1.ActiveDirectory activeDirectory_;
/**
*
*
* <pre>
* Required. Fields of the to be created active directory.
* </pre>
*
* <code>
* .google.cloud.netapp.v1.ActiveDirectory active_directory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the activeDirectory field is set.
*/
@java.lang.Override
public boolean hasActiveDirectory() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Fields of the to be created active directory.
* </pre>
*
* <code>
* .google.cloud.netapp.v1.ActiveDirectory active_directory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The activeDirectory.
*/
@java.lang.Override
public com.google.cloud.netapp.v1.ActiveDirectory getActiveDirectory() {
return activeDirectory_ == null
? com.google.cloud.netapp.v1.ActiveDirectory.getDefaultInstance()
: activeDirectory_;
}
/**
*
*
* <pre>
* Required. Fields of the to be created active directory.
* </pre>
*
* <code>
* .google.cloud.netapp.v1.ActiveDirectory active_directory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.netapp.v1.ActiveDirectoryOrBuilder getActiveDirectoryOrBuilder() {
return activeDirectory_ == null
? com.google.cloud.netapp.v1.ActiveDirectory.getDefaultInstance()
: activeDirectory_;
}
public static final int ACTIVE_DIRECTORY_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object activeDirectoryId_ = "";
/**
*
*
* <pre>
* Required. ID of the active directory to create. Must be unique within the
* parent resource. Must contain only letters, numbers and hyphen, with the
* first character a letter , the last a letter or a number, and a 63
* character maximum.
* </pre>
*
* <code>string active_directory_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The activeDirectoryId.
*/
@java.lang.Override
public java.lang.String getActiveDirectoryId() {
java.lang.Object ref = activeDirectoryId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
activeDirectoryId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. ID of the active directory to create. Must be unique within the
* parent resource. Must contain only letters, numbers and hyphen, with the
* first character a letter , the last a letter or a number, and a 63
* character maximum.
* </pre>
*
* <code>string active_directory_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for activeDirectoryId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getActiveDirectoryIdBytes() {
java.lang.Object ref = activeDirectoryId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
activeDirectoryId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  // NOTE(review): protoc-generated serialization -- do not hand-edit; regenerate from the .proto.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    // active_directory is a message field; its presence is tracked via bitField0_.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getActiveDirectory());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(activeDirectoryId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, activeDirectoryId_);
    }
    getUnknownFields().writeTo(output);
  }
  // NOTE(review): generated size computation; the result is memoized in memoizedSize
  // (safe because the message is immutable once built).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getActiveDirectory());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(activeDirectoryId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, activeDirectoryId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // NOTE(review): generated field-by-field value equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.netapp.v1.CreateActiveDirectoryRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.netapp.v1.CreateActiveDirectoryRequest other =
        (com.google.cloud.netapp.v1.CreateActiveDirectoryRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    // Presence of the message field must match before its contents are compared.
    if (hasActiveDirectory() != other.hasActiveDirectory()) return false;
    if (hasActiveDirectory()) {
      if (!getActiveDirectory().equals(other.getActiveDirectory())) return false;
    }
    if (!getActiveDirectoryId().equals(other.getActiveDirectoryId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // NOTE(review): generated hash, consistent with equals(); memoized in memoizedHashCode.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    // Message field contributes only when present, mirroring equals().
    if (hasActiveDirectory()) {
      hash = (37 * hash) + ACTIVE_DIRECTORY_FIELD_NUMBER;
      hash = (53 * hash) + getActiveDirectory().hashCode();
    }
    hash = (37 * hash) + ACTIVE_DIRECTORY_ID_FIELD_NUMBER;
    hash = (53 * hash) + getActiveDirectoryId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.netapp.v1.CreateActiveDirectoryRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.CreateActiveDirectoryRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.CreateActiveDirectoryRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.CreateActiveDirectoryRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.CreateActiveDirectoryRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.CreateActiveDirectoryRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.CreateActiveDirectoryRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.CreateActiveDirectoryRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.netapp.v1.CreateActiveDirectoryRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.CreateActiveDirectoryRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.netapp.v1.CreateActiveDirectoryRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.CreateActiveDirectoryRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.netapp.v1.CreateActiveDirectoryRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* CreateActiveDirectoryRequest for creating an active directory.
* </pre>
*
* Protobuf type {@code google.cloud.netapp.v1.CreateActiveDirectoryRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.netapp.v1.CreateActiveDirectoryRequest)
com.google.cloud.netapp.v1.CreateActiveDirectoryRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.netapp.v1.ActiveDirectoryProto
.internal_static_google_cloud_netapp_v1_CreateActiveDirectoryRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.netapp.v1.ActiveDirectoryProto
.internal_static_google_cloud_netapp_v1_CreateActiveDirectoryRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.netapp.v1.CreateActiveDirectoryRequest.class,
com.google.cloud.netapp.v1.CreateActiveDirectoryRequest.Builder.class);
}
// Construct using com.google.cloud.netapp.v1.CreateActiveDirectoryRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getActiveDirectoryFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
activeDirectory_ = null;
if (activeDirectoryBuilder_ != null) {
activeDirectoryBuilder_.dispose();
activeDirectoryBuilder_ = null;
}
activeDirectoryId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.netapp.v1.ActiveDirectoryProto
.internal_static_google_cloud_netapp_v1_CreateActiveDirectoryRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.netapp.v1.CreateActiveDirectoryRequest getDefaultInstanceForType() {
return com.google.cloud.netapp.v1.CreateActiveDirectoryRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.netapp.v1.CreateActiveDirectoryRequest build() {
com.google.cloud.netapp.v1.CreateActiveDirectoryRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.netapp.v1.CreateActiveDirectoryRequest buildPartial() {
com.google.cloud.netapp.v1.CreateActiveDirectoryRequest result =
new com.google.cloud.netapp.v1.CreateActiveDirectoryRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // NOTE(review): generated -- copies into `result` only the fields whose builder
    // bits are set, and translates builder presence bits into message presence bits.
    private void buildPartial0(com.google.cloud.netapp.v1.CreateActiveDirectoryRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.activeDirectory_ =
            activeDirectoryBuilder_ == null ? activeDirectory_ : activeDirectoryBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.activeDirectoryId_ = activeDirectoryId_;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.netapp.v1.CreateActiveDirectoryRequest) {
return mergeFrom((com.google.cloud.netapp.v1.CreateActiveDirectoryRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.netapp.v1.CreateActiveDirectoryRequest other) {
if (other == com.google.cloud.netapp.v1.CreateActiveDirectoryRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasActiveDirectory()) {
mergeActiveDirectory(other.getActiveDirectory());
}
if (!other.getActiveDirectoryId().isEmpty()) {
activeDirectoryId_ = other.activeDirectoryId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // NOTE(review): generated wire-format parse loop; tags 10/18/26 correspond to
    // fields parent(1), active_directory(2), active_directory_id(3) as length-delimited.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getActiveDirectoryFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                activeDirectoryId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.netapp.v1.ActiveDirectory activeDirectory_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.netapp.v1.ActiveDirectory,
com.google.cloud.netapp.v1.ActiveDirectory.Builder,
com.google.cloud.netapp.v1.ActiveDirectoryOrBuilder>
activeDirectoryBuilder_;
/**
*
*
* <pre>
* Required. Fields of the to be created active directory.
* </pre>
*
* <code>
* .google.cloud.netapp.v1.ActiveDirectory active_directory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the activeDirectory field is set.
*/
public boolean hasActiveDirectory() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Fields of the to be created active directory.
* </pre>
*
* <code>
* .google.cloud.netapp.v1.ActiveDirectory active_directory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The activeDirectory.
*/
public com.google.cloud.netapp.v1.ActiveDirectory getActiveDirectory() {
if (activeDirectoryBuilder_ == null) {
return activeDirectory_ == null
? com.google.cloud.netapp.v1.ActiveDirectory.getDefaultInstance()
: activeDirectory_;
} else {
return activeDirectoryBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Fields of the to be created active directory.
* </pre>
*
* <code>
* .google.cloud.netapp.v1.ActiveDirectory active_directory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setActiveDirectory(com.google.cloud.netapp.v1.ActiveDirectory value) {
if (activeDirectoryBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
activeDirectory_ = value;
} else {
activeDirectoryBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Fields of the to be created active directory.
* </pre>
*
* <code>
* .google.cloud.netapp.v1.ActiveDirectory active_directory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setActiveDirectory(
com.google.cloud.netapp.v1.ActiveDirectory.Builder builderForValue) {
if (activeDirectoryBuilder_ == null) {
activeDirectory_ = builderForValue.build();
} else {
activeDirectoryBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Fields of the to be created active directory.
* </pre>
*
* <code>
* .google.cloud.netapp.v1.ActiveDirectory active_directory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // NOTE(review): generated merge -- merges `value` into an existing non-default
    // message, otherwise replaces the field outright; builder path delegates entirely.
    public Builder mergeActiveDirectory(com.google.cloud.netapp.v1.ActiveDirectory value) {
      if (activeDirectoryBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && activeDirectory_ != null
            && activeDirectory_
                != com.google.cloud.netapp.v1.ActiveDirectory.getDefaultInstance()) {
          getActiveDirectoryBuilder().mergeFrom(value);
        } else {
          activeDirectory_ = value;
        }
      } else {
        activeDirectoryBuilder_.mergeFrom(value);
      }
      if (activeDirectory_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Fields of the active directory to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.ActiveDirectory active_directory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearActiveDirectory() {
      // Clear the has-bit for field 2 and drop both the stored message and any
      // nested builder so the field reads as unset.
      bitField0_ = (bitField0_ & ~0x00000002);
      activeDirectory_ = null;
      if (activeDirectoryBuilder_ != null) {
        activeDirectoryBuilder_.dispose();
        activeDirectoryBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Fields of the active directory to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.ActiveDirectory active_directory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.netapp.v1.ActiveDirectory.Builder getActiveDirectoryBuilder() {
      // Handing out a mutable builder implies the field is (about to be) set.
      bitField0_ |= 0x00000002;
      onChanged();
      return getActiveDirectoryFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Fields of the active directory to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.ActiveDirectory active_directory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.netapp.v1.ActiveDirectoryOrBuilder getActiveDirectoryOrBuilder() {
      if (activeDirectoryBuilder_ != null) {
        return activeDirectoryBuilder_.getMessageOrBuilder();
      } else {
        // Never return null: fall back to the immutable default instance.
        return activeDirectory_ == null
            ? com.google.cloud.netapp.v1.ActiveDirectory.getDefaultInstance()
            : activeDirectory_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Fields of the active directory to be created.
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.ActiveDirectory active_directory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.netapp.v1.ActiveDirectory,
            com.google.cloud.netapp.v1.ActiveDirectory.Builder,
            com.google.cloud.netapp.v1.ActiveDirectoryOrBuilder>
        getActiveDirectoryFieldBuilder() {
      // Lazily create the nested field builder; from then on the builder owns
      // the field's state, so the plain message reference is released.
      if (activeDirectoryBuilder_ == null) {
        activeDirectoryBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.netapp.v1.ActiveDirectory,
                com.google.cloud.netapp.v1.ActiveDirectory.Builder,
                com.google.cloud.netapp.v1.ActiveDirectoryOrBuilder>(
                getActiveDirectory(), getParentForChildren(), isClean());
        activeDirectory_ = null;
      }
      return activeDirectoryBuilder_;
    }
    // Holds either a String or a ByteString; decoded lazily and cached as String.
    private java.lang.Object activeDirectoryId_ = "";
    /**
     *
     *
     * <pre>
     * Required. ID of the active directory to create. Must be unique within the
     * parent resource. Must contain only letters, numbers and hyphen, with the
     * first character a letter, the last a letter or a number, and a 63
     * character maximum.
     * </pre>
     *
     * <code>string active_directory_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The activeDirectoryId.
     */
    public java.lang.String getActiveDirectoryId() {
      java.lang.Object ref = activeDirectoryId_;
      if (!(ref instanceof java.lang.String)) {
        // Field still holds the wire-format ByteString: decode once and cache.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        activeDirectoryId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. ID of the active directory to create. Must be unique within the
     * parent resource. Must contain only letters, numbers and hyphen, with the
     * first character a letter, the last a letter or a number, and a 63
     * character maximum.
     * </pre>
     *
     * <code>string active_directory_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for activeDirectoryId.
     */
    public com.google.protobuf.ByteString getActiveDirectoryIdBytes() {
      java.lang.Object ref = activeDirectoryId_;
      if (ref instanceof String) {
        // Field holds a String: encode once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        activeDirectoryId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. ID of the active directory to create. Must be unique within the
     * parent resource. Must contain only letters, numbers and hyphen, with the
     * first character a letter, the last a letter or a number, and a 63
     * character maximum.
     * </pre>
     *
     * <code>string active_directory_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The activeDirectoryId to set.
     * @return This builder for chaining.
     */
    public Builder setActiveDirectoryId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      activeDirectoryId_ = value;
      // 0x00000004 is the has-bit for field 3 (active_directory_id).
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. ID of the active directory to create. Must be unique within the
     * parent resource. Must contain only letters, numbers and hyphen, with the
     * first character a letter, the last a letter or a number, and a 63
     * character maximum.
     * </pre>
     *
     * <code>string active_directory_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearActiveDirectoryId() {
      // Reset to the default instance's value (empty string) and clear the has-bit.
      activeDirectoryId_ = getDefaultInstance().getActiveDirectoryId();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. ID of the active directory to create. Must be unique within the
     * parent resource. Must contain only letters, numbers and hyphen, with the
     * first character a letter, the last a letter or a number, and a 63
     * character maximum.
     * </pre>
     *
     * <code>string active_directory_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for activeDirectoryId to set.
     * @return This builder for chaining.
     */
    public Builder setActiveDirectoryIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject invalid byte sequences.
      checkByteStringIsUtf8(value);
      activeDirectoryId_ = value;
      // 0x00000004 is the has-bit for field 3 (active_directory_id).
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    // Unknown-field handling is delegated unchanged to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.netapp.v1.CreateActiveDirectoryRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.netapp.v1.CreateActiveDirectoryRequest)
  // Shared singleton representing this message type with all fields unset.
  private static final com.google.cloud.netapp.v1.CreateActiveDirectoryRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.netapp.v1.CreateActiveDirectoryRequest();
  }
  /** Returns the shared immutable default instance of this message type. */
  public static com.google.cloud.netapp.v1.CreateActiveDirectoryRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<CreateActiveDirectoryRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateActiveDirectoryRequest>() {
        @java.lang.Override
        public CreateActiveDirectoryRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the parser for this message type. */
  public static com.google.protobuf.Parser<CreateActiveDirectoryRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateActiveDirectoryRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.netapp.v1.CreateActiveDirectoryRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/commons-imaging | 38,645 | src/main/java/org/apache/commons/imaging/AbstractImageParser.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.imaging;
import java.awt.Dimension;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.imaging.bytesource.ByteSource;
import org.apache.commons.imaging.common.BinaryFileParser;
import org.apache.commons.imaging.common.BufferedImageFactory;
import org.apache.commons.imaging.common.ImageMetadata;
import org.apache.commons.imaging.common.SimpleBufferedImageFactory;
import org.apache.commons.imaging.formats.bmp.BmpImageParser;
import org.apache.commons.imaging.formats.dcx.DcxImageParser;
import org.apache.commons.imaging.formats.gif.GifImageParser;
import org.apache.commons.imaging.formats.icns.IcnsImageParser;
import org.apache.commons.imaging.formats.ico.IcoImageParser;
import org.apache.commons.imaging.formats.jpeg.JpegImageParser;
import org.apache.commons.imaging.formats.pcx.PcxImageParser;
import org.apache.commons.imaging.formats.png.PngImageParser;
import org.apache.commons.imaging.formats.pnm.PnmImageParser;
import org.apache.commons.imaging.formats.psd.PsdImageParser;
import org.apache.commons.imaging.formats.rgbe.RgbeImageParser;
import org.apache.commons.imaging.formats.tiff.TiffImageParser;
import org.apache.commons.imaging.formats.wbmp.WbmpImageParser;
import org.apache.commons.imaging.formats.webp.WebPImageParser;
import org.apache.commons.imaging.formats.xbm.XbmImageParser;
import org.apache.commons.imaging.formats.xpm.XpmImageParser;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
/**
* Provides the abstract base class for all image reading and writing utilities. ImageParser implementations are expected to extend this class providing logic
 * for identifying and processing data in their own specific format. Specific implementations are found under the org.apache.commons.imaging.formats package.
*
* <h2>Application Notes</h2>
*
* <h3>Format support</h3>
*
* For the most recent information on format support for the Apache Commons Imaging package, refer to
* <a href="https://commons.apache.org/imaging/formatsupport.html">Format Support</a> at the main project development web site.
*
* <h3>On the accuracy of this Javadoc</h3>
*
* The original authors of this class did not supply documentation. The Javadoc for this class is based on inspection of the source code. In some cases, the
* purpose and usage for particular methods was deduced from the source and may not perfectly reflect the intentions of the original. Therefore, you should not
* assume that the documentation is perfect, especially in the more obscure and specialized areas of implementation.
*
* <h3>The "params" argument</h3>
*
* <p>
* Many of the methods specified by this class accept an argument of type {@code T} defining the parameters to be used when processing an image. For example,
 * some of the output formats permit different kinds of image compression or color models. Some of the reading methods permit the calling application to
* require strict format compliance.
* </p>
*
* @param <T> type of parameters used by this image parser
*/
public abstract class AbstractImageParser<T extends ImagingParameters<T>> extends BinaryFileParser {
    private static final Logger LOGGER = Logger.getLogger(AbstractImageParser.class.getName());
    /**
     * Gets a list of new instances of all image parsers.
     *
     * @return A valid list of image parser instances, one per supported format.
     */
    public static List<AbstractImageParser<?>> getAllImageParsers() {
        return Arrays.asList(new BmpImageParser(), new DcxImageParser(), new GifImageParser(), new IcnsImageParser(), new IcoImageParser(),
                new JpegImageParser(), new PcxImageParser(), new PngImageParser(), new PnmImageParser(), new PsdImageParser(), new RgbeImageParser(),
                new TiffImageParser(), new WebPImageParser(), new WbmpImageParser(), new XbmImageParser(), new XpmImageParser()
        // new JBig2ImageParser(),
        // new TgaImageParser(),
        );
    }
    /**
     * Constructs a new instance with the default, big-endian, byte order.
     */
    public AbstractImageParser() {
        // empty
    }
    /**
     * Constructs a new instance.
     *
     * @param byteOrder the byte order to use when reading or writing binary data.
     */
    public AbstractImageParser(final ByteOrder byteOrder) {
        super(byteOrder);
    }
/**
* Tests whether the ImageParser implementation can accept the specified file based on its extension.
*
* @param file An valid file reference.
* @return If the parser can accept the format, true; otherwise, false.
*/
public boolean canAcceptExtension(final File file) {
return canAcceptExtension(file.getName());
}
/**
* Tests whether the ImageParser implementation can accept the specified file name based on its extension.
*
* @param fileName A valid string giving a file name or file path.
* @return If the parser can accept the format, true; otherwise, false.
*/
public final boolean canAcceptExtension(final String fileName) {
final String[] extensions = getAcceptedExtensions();
if (extensions == null) {
return true;
}
final int index = fileName.lastIndexOf('.');
if (index >= 0 && ArrayUtils.contains(extensions, StringUtils.toRootLowerCase(fileName.substring(index + 1)))) {
return true;
}
return false;
}
    /**
     * Tests whether the ImageParser implementation can accept the specified format.
     *
     * @param imageFormat An instance of ImageFormat.
     * @return If the parser can accept the format, true; otherwise, false.
     */
    public boolean canAcceptType(final ImageFormat imageFormat) {
        // Membership test against the parser's declared accepted types.
        return ArrayUtils.contains(getAcceptedTypes(), imageFormat);
    }
/**
* Writes the ImageInfo and format-specific information for the image content of the specified byte array to a string.
*
* @param bytes A valid array of bytes.
* @return A valid string.
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful read or access operation.
*/
public final String dumpImageFile(final byte[] bytes) throws ImagingException, IOException {
return dumpImageFile(ByteSource.array(bytes));
}
/**
* Writes the ImageInfo and format-specific information for the image content of the specified byte source to a string.
*
* @param byteSource A valid byte source.
* @return A valid string.
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful read or access operation.
*/
public final String dumpImageFile(final ByteSource byteSource) throws ImagingException, IOException {
final StringWriter sw = new StringWriter();
final PrintWriter pw = new PrintWriter(sw);
dumpImageFile(pw, byteSource);
pw.flush();
return sw.toString();
}
    /**
     * Writes the ImageInfo and format-specific information for the image content of the specified file to a string.
     *
     * @param file A valid file reference.
     * @return A valid string, or null if the file's extension is not accepted by this parser.
     * @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful read or access operation.
     */
    public final String dumpImageFile(final File file) throws ImagingException, IOException {
        // Skip files whose extension this parser does not claim to support.
        if (!canAcceptExtension(file)) {
            return null;
        }
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest(getName() + ": " + file.getName());
        }
        return dumpImageFile(ByteSource.file(file));
    }
    /**
     * Writes the ImageInfo and format-specific information for the image content of the specified byte source to a PrintWriter.
     * <p>
     * This base implementation writes nothing and returns false; format-specific subclasses may override it to produce output.
     *
     * @param pw print writer used for writing the ImageInfo
     * @param byteSource A valid byte source.
     * @return true if information was written; this base implementation always returns false.
     * @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful read or access operation.
     */
    public boolean dumpImageFile(final PrintWriter pw, final ByteSource byteSource) throws ImagingException, IOException {
        return false;
    }
    /**
     * Gets an array of all accepted file-name extensions.
     *
     * @return A valid array of one or more elements, or null if any extension is accepted (see canAcceptExtension).
     */
    protected abstract String[] getAcceptedExtensions();
    /**
     * Gets an array of ImageFormat objects describing all accepted types.
     *
     * @return A valid array of one or more elements.
     */
    protected abstract ImageFormat[] getAcceptedTypes();
/**
* Gets all images specified by the byte array (some formats may include multiple images within a single data source).
*
* @param bytes A valid byte array
* @return A valid (potentially empty) list of BufferedImage objects.
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful read or access operation.
*/
public final List<BufferedImage> getAllBufferedImages(final byte[] bytes) throws ImagingException, IOException {
return getAllBufferedImages(ByteSource.array(bytes));
}
    /**
     * Gets all images specified by the byte source (some formats may include multiple images within a single data source).
     * <p>
     * Note: this base implementation only returns the single image produced by {@code getBufferedImage}; parsers for
     * multi-image formats are expected to override it.
     *
     * @param byteSource A valid instance of ByteSource.
     * @return A valid (potentially empty) list of BufferedImage objects.
     * @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful read or access operation.
     */
    public List<BufferedImage> getAllBufferedImages(final ByteSource byteSource) throws ImagingException, IOException {
        final BufferedImage bi = getBufferedImage(byteSource, null);
        final List<BufferedImage> result = new ArrayList<>();
        // FIXME this doesn't look like we're actually getting all images contained in the given ByteSource...
        result.add(bi);
        return result;
    }
    /**
     * Gets all images specified by indicated file (some formats may include multiple images within a single data source).
     *
     * @param file A valid reference to a file.
     * @return A valid (potentially empty) list of BufferedImage objects, or null if the file's extension is not accepted.
     * @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful read or access operation.
     */
    public final List<BufferedImage> getAllBufferedImages(final File file) throws ImagingException, IOException {
        // Skip files whose extension this parser does not claim to support.
        if (!canAcceptExtension(file)) {
            return null;
        }
        return getAllBufferedImages(ByteSource.file(file));
    }
/**
* Gets a buffered image specified by the byte array (for sources that specify multiple images, choice of which image is returned is implementation
* dependent).
*
* @param bytes A valid byte array
* @param params Optional instructions for special-handling or interpretation of the input data (null objects are permitted and must be supported by
* implementations).
* @return A valid instance of BufferedImage.
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful read or access operation.
*/
public final BufferedImage getBufferedImage(final byte[] bytes, final T params) throws ImagingException, IOException {
return getBufferedImage(ByteSource.array(bytes), params);
}
    /**
     * Gets a buffered image specified by the byte source (for sources that specify multiple images, choice of which image is returned is implementation
     * dependent).
     *
     * @param byteSource A valid instance of ByteSource
     * @param params     Optional instructions for special-handling or interpretation of the input data (null objects are permitted and must be supported by
     *                   implementations).
     * @return A valid instance of BufferedImage.
     * @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful read or access operation.
     */
    public abstract BufferedImage getBufferedImage(ByteSource byteSource, T params) throws ImagingException, IOException;
    /**
     * Gets a buffered image specified by the indicated file (for sources that specify multiple images, choice of which image is returned is implementation
     * dependent).
     *
     * @param file A valid file reference.
     * @param params Optional instructions for special-handling or interpretation of the input data (null objects are permitted and must be supported by
     *               implementations).
     * @return A valid instance of BufferedImage, or null if the file's extension is not accepted.
     * @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful read or access operation.
     */
    public final BufferedImage getBufferedImage(final File file, final T params) throws ImagingException, IOException {
        // Skip files whose extension this parser does not claim to support.
        if (!canAcceptExtension(file)) {
            return null;
        }
        return getBufferedImage(ByteSource.file(file), params);
    }
/**
* Gets an instance of IBufferedImageFactory based on the presence of a specification for ImagingConstants..BUFFERED_IMAGE_FACTORY within the supplied
* params.
*
* @param params optional parameters.
* @return A valid instance of an implementation of a IBufferedImageFactory.
*/
protected BufferedImageFactory getBufferedImageFactory(final T params) {
if (params == null) {
return new SimpleBufferedImageFactory();
}
final BufferedImageFactory result = params.getBufferedImageFactory();
if (null != result) {
return result;
}
return new SimpleBufferedImageFactory();
}
    /**
     * Gets the default extension for the format specified by an implementation of ImageParser. Some parsers can support more than one extension (i.e. .JPEG,
     * .JPG; .TIF, .TIFF, etc.).
     *
     * @return A valid string.
     */
    public abstract String getDefaultExtension();
    /**
     * Gets a default parameters instance for this parser.
     *
     * @return default parameters instance
     */
    public abstract T getDefaultParameters();
/**
* Determines the format compliance of the content of the supplied byte array based on rules provided by a specific implementation.
*
* @param bytes A valid byte array.
* @return A valid FormatCompliance object.
* @throws ImagingException may be thrown by sub-classes
* @throws IOException may be thrown by sub-classes
*/
public final FormatCompliance getFormatCompliance(final byte[] bytes) throws ImagingException, IOException {
return getFormatCompliance(ByteSource.array(bytes));
}
    /**
     * Determines the format compliance of the content of the supplied byte source based on rules provided by a specific implementation.
     * <p>
     * This base implementation performs no check and returns null; subclasses may override it.
     *
     * @param byteSource A valid instance of ByteSource
     * @return A valid FormatCompliance object, or null; this base implementation always returns null.
     * @throws ImagingException may be thrown by sub-classes
     * @throws IOException may be thrown by sub-classes
     */
    public FormatCompliance getFormatCompliance(final ByteSource byteSource) throws ImagingException, IOException {
        return null;
    }
    /**
     * Determines the format compliance of the specified file based on rules provided by a specific implementation.
     *
     * @param file A valid reference to a file.
     * @return A valid format compliance object, or null if the file's extension is not accepted.
     * @throws ImagingException may be thrown by sub-classes
     * @throws IOException may be thrown by sub-classes
     */
    public final FormatCompliance getFormatCompliance(final File file) throws ImagingException, IOException {
        // Skip files whose extension this parser does not claim to support.
        if (!canAcceptExtension(file)) {
            return null;
        }
        return getFormatCompliance(ByteSource.file(file));
    }
/**
* Gets an array of bytes describing the International Color Consortium (ICC) specification for the color space of the image contained in the input byte
* array. Not all formats support ICC profiles.
*
* @param bytes A valid array of bytes.
* @return If available, a valid array of bytes; otherwise, a null
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful read or access operation.
*/
public final byte[] getIccProfileBytes(final byte[] bytes) throws ImagingException, IOException {
return getIccProfileBytes(bytes, null);
}
/**
* Gets an array of bytes describing the International Color Consortium (ICC) specification for the color space of the image contained in the input byte
* array. Not all formats support ICC profiles.
*
* @param bytes A valid array of bytes.
* @param params Optional instructions for special-handling or interpretation of the input data.
* @return If available, a valid array of bytes; otherwise, a null
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful read or access operation.
*/
public final byte[] getIccProfileBytes(final byte[] bytes, final T params) throws ImagingException, IOException {
return getIccProfileBytes(ByteSource.array(bytes), params);
}
    /**
     * Gets an array of bytes describing the International Color Consortium (ICC) specification for the color space of the image contained in the input
     * byteSource. Not all formats support ICC profiles.
     *
     * @param byteSource A valid ByteSource.
     * @param params Optional instructions for special-handling or interpretation of the input data.
     * @return If available, a valid array of bytes; otherwise, a null
     * @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful read or access operation.
     */
    public abstract byte[] getIccProfileBytes(ByteSource byteSource, T params) throws ImagingException, IOException;
    /**
     * Gets an array of bytes describing the International Color Consortium (ICC) specification for the color space of the image contained in the input file.
     * Not all formats support ICC profiles.
     *
     * @param file A valid file reference.
     * @return If available, a valid array of bytes; otherwise, a null
     * @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful read or access operation.
     */
    public final byte[] getIccProfileBytes(final File file) throws ImagingException, IOException {
        return getIccProfileBytes(file, null);
    }
    /**
     * Gets an array of bytes describing the International Color Consortium (ICC) specification for the color space of the image contained in the input file.
     * Not all formats support ICC profiles.
     *
     * @param file A valid file reference.
     * @param params Optional instructions for special-handling or interpretation of the input data.
     * @return If available, a valid array of bytes; otherwise, a null (also null if the file's extension is not accepted)
     * @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful read or access operation.
     */
    public final byte[] getIccProfileBytes(final File file, final T params) throws ImagingException, IOException {
        // Skip files whose extension this parser does not claim to support.
        if (!canAcceptExtension(file)) {
            return null;
        }
        if (LOGGER.isLoggable(Level.FINEST)) {
            LOGGER.finest(getName() + ": " + file.getName());
        }
        return getIccProfileBytes(ByteSource.file(file), params);
    }
/**
* Gets image information from the specified array of bytes. Format-specific ImageParser implementations are expected to return a valid ImageInfo object or
* to throw an ImageReadException if unable to process the specified data.
* <p>
* The params argument provides a mechanism for individual implementations to pass optional information into the parser. Not all formats will require this
* capability. Because the base class may call this method with a null params argument, implementations should <strong>always</strong> include logic for
* ignoring null input.
*
* @param bytes A valid array of bytes
* @param params Optional instructions for special-handling or interpretation of the input data (null objects are permitted and must be supported by
* implementations).
* @return A valid image information object describing the content extracted from the specified data.
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful data access operation.
*/
public final ImageInfo getImageInfo(final byte[] bytes, final T params) throws ImagingException, IOException {
return getImageInfo(ByteSource.array(bytes), params);
}
    /**
     * Gets image information from the specified ByteSource. Format-specific ImageParser implementations are expected to return a valid ImageInfo object or to
     * throw an ImagingException if unable to process the specified data.
     *
     * @param byteSource A valid ByteSource object
     * @return A valid image information object describing the content extracted from the specified data.
     * @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful data access operation.
     */
    public final ImageInfo getImageInfo(final ByteSource byteSource) throws ImagingException, IOException {
        return getImageInfo(byteSource, null);
    }
    /**
     * Gets image information from the specified ByteSource. Format-specific ImageParser implementations are expected to return a valid ImageInfo object or to
     * throw an ImagingException if unable to process the specified data.
     *
     * <p>
     * The params argument provides a mechanism for individual implementations to pass optional information into the parser. Not all formats will require this
     * capability. Because the base class may call this method with a null params argument, implementations should <strong>always</strong> include logic for
     * ignoring null input.
     *
     * @param byteSource A valid ByteSource object
     * @param params Optional instructions for special-handling or interpretation of the input data (null objects are permitted and must be supported by
     *               implementations).
     * @return A valid image information object describing the content extracted from the specified data.
     * @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful data access operation.
     */
    public abstract ImageInfo getImageInfo(ByteSource byteSource, T params) throws ImagingException, IOException;
    /**
     * Gets image information from the specified file. Format-specific ImageParser implementations are expected to return a valid ImageInfo object or to throw
     * an ImagingException if unable to process the specified data.
     * <p>
     * The params argument provides a mechanism for individual implementations to pass optional information into the parser. Not all formats will require this
     * capability. Because the base class may call this method with a null params argument, implementations should <strong>always</strong> include logic for
     * ignoring null input.
     *
     * @param file A valid File object
     * @param params Optional instructions for special-handling or interpretation of the input data (null objects are permitted and must be supported by
     *               implementations).
     * @return A valid image information object describing the content extracted from the specified data, or null if the file's extension is not accepted.
     * @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful file read or access operation.
     */
    public final ImageInfo getImageInfo(final File file, final T params) throws ImagingException, IOException {
        // Skip files whose extension this parser does not claim to support.
        if (!canAcceptExtension(file)) {
            return null;
        }
        return getImageInfo(ByteSource.file(file), params);
    }
/**
* Gets the size of the image described by the specified byte array.
*
* @param bytes A valid byte array.
* @return A valid instance of Dimension.
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful read or access operation.
*/
public final Dimension getImageSize(final byte[] bytes) throws ImagingException, IOException {
return getImageSize(bytes, null);
}
/**
* Gets the size of the image described by the specified byte array.
*
* @param bytes A valid byte array.
* @param params Optional instructions for special-handling or interpretation of the input data.
* @return A valid instance of Dimension.
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful read or access operation.
*/
public final Dimension getImageSize(final byte[] bytes, final T params) throws ImagingException, IOException {
return getImageSize(ByteSource.array(bytes), params);
}
    /**
     * Gets the size of the image described by the specified ByteSource.
     * <p>
     * The base class may call this method with a null params argument (the single-argument overloads do), so implementations must tolerate null input.
     *
     * @param byteSource A valid reference to a ByteSource.
     * @param params Optional instructions for special-handling or interpretation of the input data (null is permitted and must be supported).
     * @return A valid instance of Dimension.
     * @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful read or access operation.
     */
    public abstract Dimension getImageSize(ByteSource byteSource, T params) throws ImagingException, IOException;
/**
* Gets the size of the image described by the specified file.
*
* @param file A valid reference to a file.
* @return A valid instance of Dimension.
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful read or access operation.
*/
public final Dimension getImageSize(final File file) throws ImagingException, IOException {
return getImageSize(file, null);
}
/**
* Gets the size of the image described by the specified file.
*
* @param file A valid reference to a file.
* @param params Optional instructions for special-handling or interpretation of the input data.
* @return A valid instance of Dimension.
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful read or access operation.
*/
public final Dimension getImageSize(final File file, final T params) throws ImagingException, IOException {
if (!canAcceptExtension(file)) {
return null;
}
return getImageSize(ByteSource.file(file), params);
}
/**
* Gets image metadata from the specified array of bytes. Format-specific ImageParser implementations are expected to return a valid IImageMetadata object
* or to throw an ImageReadException if unable to process the specified data.
*
* @param bytes A valid array of bytes
* @return A valid, potentially subject-matter-specific implementation of the IImageMetadata interface describing the content extracted from the source
* content.
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful data read operation.
*/
public final ImageMetadata getMetadata(final byte[] bytes) throws ImagingException, IOException {
return getMetadata(bytes, null);
}
/**
* Gets image metadata from the specified array of bytes. Format-specific ImageParser implementations are expected to return a valid IImageMetadata object
* or to throw an ImageReadException if unable to process the specified data.
*
* <p>
* The params argument provides a mechanism for individual implementations to pass optional information into the parser. Not all formats will require this
* capability. Because the base class may call this method with a null params argument, implementations should <strong>always</strong> include logic for
* ignoring null input.
*
* @param bytes A valid array of bytes
* @param params Optional instructions for special-handling or interpretation of the input data (null objects are permitted and must be supported by
* implementations).
* @return A valid image metadata object describing the content extracted from the specified content.
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful data read operation.
*/
public final ImageMetadata getMetadata(final byte[] bytes, final T params) throws ImagingException, IOException {
return getMetadata(ByteSource.array(bytes), params);
}
/**
* Gets image metadata from the specified byte source. Format-specific ImageParser implementations are expected to return a valid IImageMetadata object or
* to throw an ImageReadException if unable to process the specified byte source.
*
* @param byteSource A valid byte source.
* @return A valid, potentially subject-matter-specific implementation of the IImageMetadata interface describing the content extracted from the source
* content.
* @throws ImagingException In the event that the ByteSource content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful data read operation.
*/
public final ImageMetadata getMetadata(final ByteSource byteSource) throws ImagingException, IOException {
return getMetadata(byteSource, null);
}
    /**
     * Gets image metadata from the specified byte source. Format-specific ImageParser implementations are expected to return a valid ImageMetadata object or
     * to throw an ImagingException if unable to process the specified byte source.
     *
     * <p>
     * The params argument provides a mechanism for individual implementations to pass optional information into the parser. Not all formats will require this
     * capability. Because the base class may call this method with a null params argument, implementations should <strong>always</strong> include logic for
     * ignoring null input.
     *
     * @param byteSource A valid byte source.
     * @param params Optional instructions for special-handling or interpretation of the input data (null objects are permitted and must be supported by
     *        implementations).
     * @return A valid, potentially subject-matter-specific implementation of the ImageMetadata interface describing the content extracted from the source
     *         content.
     * @throws ImagingException In the event that the ByteSource content does not conform to the format of the specific parser implementation.
     * @throws IOException In the event of unsuccessful data read operation.
     */
    public abstract ImageMetadata getMetadata(ByteSource byteSource, T params) throws ImagingException, IOException;
/**
* Gets image metadata from the specified file. Format-specific ImageParser implementations are expected to return a valid IImageMetadata object or to throw
* an ImageReadException if unable to process the specified data.
*
* @param file A valid reference to a file.
* @return A valid image metadata object describing the content extracted from the specified content.
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful file read or access operation.
*/
public final ImageMetadata getMetadata(final File file) throws ImagingException, IOException {
return getMetadata(file, null);
}
/**
* Gets image metadata from the specified file. Format-specific ImageParser implementations are expected to return a valid IImageMetadata object or to throw
* an ImageReadException if unable to process the specified data.
*
* <p>
* The params argument provides a mechanism for individual implementations to pass optional information into the parser. Not all formats will require this
* capability. Because the base class may call this method with a null params argument, implementations should <strong>always</strong> include logic for
* ignoring null input.
*
* @param file A valid reference to a file.
* @param params Optional instructions for special-handling or interpretation of the input data (null objects are permitted and must be supported by
* implementations).
* @return A valid image metadata object describing the content extracted from the specified content.
* @throws ImagingException In the event that the specified content does not conform to the format of the specific parser implementation.
* @throws IOException In the event of unsuccessful file read or access operation.
*/
public final ImageMetadata getMetadata(final File file, final T params) throws ImagingException, IOException {
if (LOGGER.isLoggable(Level.FINEST)) {
LOGGER.finest(getName() + ".getMetadata: " + file.getName());
}
if (!canAcceptExtension(file)) {
return null;
}
return getMetadata(ByteSource.file(file), params);
}
    /**
     * Gets a descriptive name for the implementation of an ImageParser. Used in diagnostic output such as log messages and error messages (see
     * {@link #writeImage}).
     *
     * @return a valid, subject-matter-specific string.
     */
    public abstract String getName();
/**
* Writes the content of a BufferedImage to the specified output stream.
*
* <p>
* The params argument provides a mechanism for individual implementations to pass optional information into the parser. Not all formats will support this
* capability. Currently, some of the parsers do not check for null arguments.
* </p>
*
* @param src An image giving the source content for output
* @param os A valid output stream for storing the formatted image
* @param params optional parameters, defining format-specific instructions for output (such as selections for data compression, color models, etc.)
* @throws ImagingException In the event that the output format cannot handle the input image or invalid params are specified.
* @throws IOException In the event of an write error from the output stream.
*/
public void writeImage(final BufferedImage src, final OutputStream os, final T params) throws ImagingException, IOException {
throw new ImagingException("This image format (" + getName() + ") cannot be written.");
}
}
|
googleapis/google-cloud-java | 37,499 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MemoryBankServiceProto.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/memory_bank_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
public final class MemoryBankServiceProto {
private MemoryBankServiceProto() {}
  // Generated no-op: this proto file defines no extensions to register.
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
  // Delegates to the Lite overload; kept for generated-code API compatibility.
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
  }
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_CreateMemoryRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_CreateMemoryRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_CreateMemoryOperationMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_CreateMemoryOperationMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_GetMemoryRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_GetMemoryRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_UpdateMemoryRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_UpdateMemoryRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_UpdateMemoryOperationMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_UpdateMemoryOperationMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_DeleteMemoryRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_DeleteMemoryRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_DeleteMemoryOperationMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_DeleteMemoryOperationMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_VertexSessionSource_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_VertexSessionSource_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectContentsSource_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectContentsSource_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectContentsSource_Event_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectContentsSource_Event_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectMemoriesSource_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectMemoriesSource_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectMemoriesSource_DirectMemory_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectMemoriesSource_DirectMemory_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_ScopeEntry_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_ScopeEntry_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesResponse_GeneratedMemory_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesResponse_GeneratedMemory_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesOperationMetadata_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesOperationMetadata_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_SimilaritySearchParams_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_SimilaritySearchParams_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_SimpleRetrievalParams_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_SimpleRetrievalParams_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_ScopeEntry_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_ScopeEntry_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesResponse_RetrievedMemory_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesResponse_RetrievedMemory_fieldAccessorTable;
  /** Returns the {@code FileDescriptor} for {@code memory_bank_service.proto}. */
  public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
    return descriptor;
  }

  // Assigned exactly once, by the class's static initializer, from the embedded descriptor data.
  private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
static {
java.lang.String[] descriptorData = {
"\n"
+ "9google/cloud/aiplatform/v1beta1/memory_bank_service.proto\022\037google.cloud.aiplat"
+ "form.v1beta1\032\034google/api/annotations.pro"
+ "to\032\027google/api/client.proto\032\037google/api/"
+ "field_behavior.proto\032\031google/api/resourc"
+ "e.proto\032-google/cloud/aiplatform/v1beta1/content.proto\0321google/cloud/aiplatform/"
+ "v1beta1/memory_bank.proto\032/google/cloud/aiplatform/v1beta1/operation.proto\032#goog"
+ "le/longrunning/operations.proto\032\033google/protobuf/empty.proto\032"
+ " google/protobuf/field_mask.proto\032\037google/protobuf/timestamp.proto\"\226\001\n"
+ "\023CreateMemoryRequest\022A\n"
+ "\006parent\030\001 \001(\tB1\340A\002\372A+\n"
+ ")aiplatform.googleapis.com/ReasoningEngine\022<\n"
+ "\006memory\030\002 \001(\0132\'.goo"
+ "gle.cloud.aiplatform.v1beta1.MemoryB\003\340A\002\"t\n"
+ "\035CreateMemoryOperationMetadata\022S\n"
+ "\020generic_metadata\030\001 \001(\01329.google.cloud.aipla"
+ "tform.v1beta1.GenericOperationMetadata\"J\n"
+ "\020GetMemoryRequest\0226\n"
+ "\004name\030\001 \001(\tB(\340A\002\372A\"\n"
+ " aiplatform.googleapis.com/Memory\"\211\001\n"
+ "\023UpdateMemoryRequest\022<\n"
+ "\006memory\030\001 \001(\0132\'.goo"
+ "gle.cloud.aiplatform.v1beta1.MemoryB\003\340A\002\0224\n"
+ "\013update_mask\030\002 \001(\0132\032.google.protobuf.FieldMaskB\003\340A\001\"t\n"
+ "\035UpdateMemoryOperationMetadata\022S\n"
+ "\020generic_metadata\030\001 \001(\01329.goog"
+ "le.cloud.aiplatform.v1beta1.GenericOperationMetadata\"\236\001\n"
+ "\023ListMemoriesRequest\022A\n"
+ "\006parent\030\001 \001(\tB1\340A\002\372A+\n"
+ ")aiplatform.googleapis.com/ReasoningEngine\022\023\n"
+ "\006filter\030\002 \001(\tB\003\340A\001\022\026\n"
+ "\tpage_size\030\003 \001(\005B\003\340A\001\022\027\n\n"
+ "page_token\030\004 \001(\tB\003\340A\001\"j\n"
+ "\024ListMemoriesResponse\0229\n"
+ "\010memories\030\001 \003(\0132\'.google.cloud.aiplatform.v1beta1.Memory\022\027\n"
+ "\017next_page_token\030\002 \001(\t\"M\n"
+ "\023DeleteMemoryRequest\0226\n"
+ "\004name\030\001 \001(\tB(\340A\002\372A\"\n"
+ " aiplatform.googleapis.com/Memory\"t\n"
+ "\035DeleteMemoryOperationMetadata\022S\n"
+ "\020generic_metadata\030\001 \001(\01329.google.cloud.aipla"
+ "tform.v1beta1.GenericOperationMetadata\"\240\t\n"
+ "\027GenerateMemoriesRequest\022m\n"
+ "\025vertex_session_source\030\002 \001(\0132L.google.cloud.aiplatf"
+ "orm.v1beta1.GenerateMemoriesRequest.VertexSessionSourceH\000\022o\n"
+ "\026direct_contents_source\030\003 \001(\0132M.google.cloud.aiplatform.v1be"
+ "ta1.GenerateMemoriesRequest.DirectContentsSourceH\000\022o\n"
+ "\026direct_memories_source\030\t \001(\0132M.google.cloud.aiplatform.v1beta1.Gen"
+ "erateMemoriesRequest.DirectMemoriesSourceH\000\022A\n"
+ "\006parent\030\001 \001(\tB1\340A\002\372A+\n"
+ ")aiplatform.googleapis.com/ReasoningEngine\022\"\n"
+ "\025disable_consolidation\030\004 \001(\010B\003\340A\001\022W\n"
+ "\005scope\030\010 \003("
+ "\0132C.google.cloud.aiplatform.v1beta1.GenerateMemoriesRequest.ScopeEntryB\003\340A\001\032\271\001\n"
+ "\023VertexSessionSource\022:\n"
+ "\007session\030\001 \001(\tB)\340A\002\372A#\n"
+ "!aiplatform.googleapis.com/Session\0223\n\n"
+ "start_time\030\002 \001(\0132\032.google.protobuf.TimestampB\003\340A\001\0221\n"
+ "\010end_time\030\003"
+ " \001(\0132\032.google.protobuf.TimestampB\003\340A\001\032\311\001\n"
+ "\024DirectContentsSource\022h\n"
+ "\006events\030\001 \003(\0132S.google.cloud."
+ "aiplatform.v1beta1.GenerateMemoriesRequest.DirectContentsSource.EventB\003\340A\002\032G\n"
+ "\005Event\022>\n"
+ "\007content\030\001"
+ " \001(\0132(.google.cloud.aiplatform.v1beta1.ContentB\003\340A\002\032\263\001\n"
+ "\024DirectMemoriesSource\022x\n"
+ "\017direct_memories\030\001 \003(\0132Z.google.cloud.aiplatform.v1beta1.Generate"
+ "MemoriesRequest.DirectMemoriesSource.DirectMemoryB\003\340A\002\032!\n"
+ "\014DirectMemory\022\021\n"
+ "\004fact\030\001 \001(\tB\003\340A\002\032,\n\n"
+ "ScopeEntry\022\013\n"
+ "\003key\030\001 \001(\t\022\r\n"
+ "\005value\030\002 \001(\t:\0028\001B\010\n"
+ "\006source\"\371\002\n"
+ "\030GenerateMemoriesResponse\022e\n"
+ "\022generated_memories\030\001 \003(\0132I.google.cloud.aiplatform.v1beta1.Gen"
+ "erateMemoriesResponse.GeneratedMemory\032\365\001\n"
+ "\017GeneratedMemory\0227\n"
+ "\006memory\030\001 \001(\0132\'.google.cloud.aiplatform.v1beta1.Memory\022`\n"
+ "\006action\030\002 \001(\0162P.google.cloud.aiplatform.v1b"
+ "eta1.GenerateMemoriesResponse.GeneratedMemory.Action\"G\n"
+ "\006Action\022\026\n"
+ "\022ACTION_UNSPECIFIED\020\000\022\013\n"
+ "\007CREATED\020\001\022\013\n"
+ "\007UPDATED\020\002\022\013\n"
+ "\007DELETED\020\003\"x\n"
+ "!GenerateMemoriesOperationMetadata\022S\n"
+ "\020generic_metadata\030\001 \001(\01329.google.cl"
+ "oud.aiplatform.v1beta1.GenericOperationMetadata\"\362\004\n"
+ "\027RetrieveMemoriesRequest\022s\n"
+ "\030similarity_search_params\030\006 \001(\0132O.google.c"
+ "loud.aiplatform.v1beta1.RetrieveMemoriesRequest.SimilaritySearchParamsH\000\022q\n"
+ "\027simple_retrieval_params\030\007 \001(\0132N.google.cloud"
+ ".aiplatform.v1beta1.RetrieveMemoriesRequest.SimpleRetrievalParamsH\000\022A\n"
+ "\006parent\030\001 \001(\tB1\340A\002\372A+\n"
+ ")aiplatform.googleapis.com/ReasoningEngine\022W\n"
+ "\005scope\030\010 \003(\0132C.google.c"
+ "loud.aiplatform.v1beta1.RetrieveMemoriesRequest.ScopeEntryB\003\340A\002\032G\n"
+ "\026SimilaritySearchParams\022\031\n"
+ "\014search_query\030\001 \001(\tB\003\340A\002\022\022\n"
+ "\005top_k\030\002 \001(\005B\003\340A\001\032H\n"
+ "\025SimpleRetrievalParams\022\026\n"
+ "\tpage_size\030\001 \001(\005B\003\340A\001\022\027\n\n"
+ "page_token\030\002 \001(\tB\003\340A\001\032,\n\n"
+ "ScopeEntry\022\013\n"
+ "\003key\030\001 \001(\t\022\r\n"
+ "\005value\030\002 \001(\t:\0028\001B\022\n"
+ "\020retrieval_params\"\370\001\n"
+ "\030RetrieveMemoriesResponse\022e\n"
+ "\022retrieved_memories\030\001 \003(\0132I.google.cloud.aiplatform."
+ "v1beta1.RetrieveMemoriesResponse.RetrievedMemory\022\027\n"
+ "\017next_page_token\030\002 \001(\t\032\\\n"
+ "\017RetrievedMemory\0227\n"
+ "\006memory\030\001 \001(\0132\'.google.cloud.aiplatform.v1beta1.Memory\022\020\n"
+ "\010distance\030\002 \001(\0012\341\020\n"
+ "\021MemoryBankService\022\236\002\n"
+ "\014CreateMemory\0224.google.cloud.aiplatform.v1beta1"
+ ".CreateMemoryRequest\032\035.google.longrunning.Operation\"\270\001\312A\'\n"
+ "\006Memory\022\035CreateMemoryO"
+ "perationMetadata\202\323\344\223\002\207\001\"D/v1beta1/{paren"
+ "t=projects/*/locations/*/reasoningEngines/*}/memories:\006memoryZ7\"-/v1beta1/{paren"
+ "t=reasoningEngines/*}/memories:\006memory\022\356\001\n"
+ "\tGetMemory\0221.google.cloud.aiplatform.v1beta1.GetMemoryRequest\032\'.google.cloud.a"
+ "iplatform.v1beta1.Memory\"\204\001\332A\004name\202\323\344\223\002w"
+ "\022D/v1beta1/{name=projects/*/locations/*/reasoningEngines/*/memories/*}Z/\022-/v1bet"
+ "a1/{name=reasoningEngines/*/memories/*}\022\301\002\n"
+ "\014UpdateMemory\0224.google.cloud.aiplatfo"
+ "rm.v1beta1.UpdateMemoryRequest\032\035.google.longrunning.Operation\"\333\001\312A\'\n"
+ "\006Memory\022\035UpdateMemoryOperationMetadata\332A\022memory,upda"
+ "te_mask\202\323\344\223\002\225\0012K/v1beta1/{memory.name=pr"
+ "ojects/*/locations/*/reasoningEngines/*/memories/*}:\006memoryZ>24/v1beta1/{memory."
+ "name=reasoningEngines/*/memories/*}:\006memory\022\204\002\n"
+ "\014ListMemories\0224.google.cloud.aiplatform.v1beta1.ListMemoriesRequest\0325.goo"
+ "gle.cloud.aiplatform.v1beta1.ListMemorie"
+ "sResponse\"\206\001\332A\006parent\202\323\344\223\002w\022D/v1beta1/{p"
+ "arent=projects/*/locations/*/reasoningEn"
+ "gines/*}/memoriesZ/\022-/v1beta1/{parent=reasoningEngines/*}/memories\022\243\002\n"
+ "\014DeleteMemory\0224.google.cloud.aiplatform.v1beta1.De"
+ "leteMemoryRequest\032\035.google.longrunning.Operation\"\275\001\312A6\n"
+ "\025google.protobuf.Empty\022\035D"
+ "eleteMemoryOperationMetadata\332A\004name\202\323\344\223\002"
+ "w*D/v1beta1/{name=projects/*/locations/*/reasoningEngines/*/memories/*}Z/*-/v1be"
+ "ta1/{name=reasoningEngines/*/memories/*}\022\315\002\n"
+ "\020GenerateMemories\0228.google.cloud.aip"
+ "latform.v1beta1.GenerateMemoriesRequest\032\035.google.longrunning.Operation\"\337\001\312A=\n"
+ "\030GenerateMemoriesResponse\022!GenerateMemories"
+ "OperationMetadata\332A\006parent\202\323\344\223\002\217\001\"M/v1be"
+ "ta1/{parent=projects/*/locations/*/reasoningEngines/*}/memories:generate:\001*Z;\"6/"
+ "v1beta1/{parent=reasoningEngines/*}/memories:generate:\001*\022\251\002\n"
+ "\020RetrieveMemories\0228.google.cloud.aiplatform.v1beta1.Retrieve"
+ "MemoriesRequest\0329.google.cloud.aiplatfor"
+ "m.v1beta1.RetrieveMemoriesResponse\"\237\001\332A\006"
+ "parent\202\323\344\223\002\217\001\"M/v1beta1/{parent=projects"
+ "/*/locations/*/reasoningEngines/*}/memories:retrieve:\001*Z;\"6/v1beta1/{parent=reas"
+ "oningEngines/*}/memories:retrieve:\001*\032M\312A"
+ "\031aiplatform.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platformB\355\001\n"
+ "#com.google.cloud.aiplatform.v1beta1B\026Mem"
+ "oryBankServiceProtoP\001ZCcloud.google.com/go/aiplatform/apiv1beta1/aiplatformpb;ai"
+ "platformpb\252\002\037Google.Cloud.AIPlatform.V1B"
+ "eta1\312\002\037Google\\Cloud\\AIPlatform\\V1beta1\352\002"
+ "\"Google::Cloud::AIPlatform::V1beta1b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
com.google.api.ClientProto.getDescriptor(),
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.ContentProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.MemoryBankProto.getDescriptor(),
com.google.cloud.aiplatform.v1beta1.OperationProto.getDescriptor(),
com.google.longrunning.OperationsProto.getDescriptor(),
com.google.protobuf.EmptyProto.getDescriptor(),
com.google.protobuf.FieldMaskProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
});
internal_static_google_cloud_aiplatform_v1beta1_CreateMemoryRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_cloud_aiplatform_v1beta1_CreateMemoryRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_CreateMemoryRequest_descriptor,
new java.lang.String[] {
"Parent", "Memory",
});
internal_static_google_cloud_aiplatform_v1beta1_CreateMemoryOperationMetadata_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_google_cloud_aiplatform_v1beta1_CreateMemoryOperationMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_CreateMemoryOperationMetadata_descriptor,
new java.lang.String[] {
"GenericMetadata",
});
internal_static_google_cloud_aiplatform_v1beta1_GetMemoryRequest_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_google_cloud_aiplatform_v1beta1_GetMemoryRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_GetMemoryRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_aiplatform_v1beta1_UpdateMemoryRequest_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_google_cloud_aiplatform_v1beta1_UpdateMemoryRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_UpdateMemoryRequest_descriptor,
new java.lang.String[] {
"Memory", "UpdateMask",
});
internal_static_google_cloud_aiplatform_v1beta1_UpdateMemoryOperationMetadata_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_google_cloud_aiplatform_v1beta1_UpdateMemoryOperationMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_UpdateMemoryOperationMetadata_descriptor,
new java.lang.String[] {
"GenericMetadata",
});
internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesRequest_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesRequest_descriptor,
new java.lang.String[] {
"Parent", "Filter", "PageSize", "PageToken",
});
internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesResponse_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_ListMemoriesResponse_descriptor,
new java.lang.String[] {
"Memories", "NextPageToken",
});
internal_static_google_cloud_aiplatform_v1beta1_DeleteMemoryRequest_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_google_cloud_aiplatform_v1beta1_DeleteMemoryRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_DeleteMemoryRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_aiplatform_v1beta1_DeleteMemoryOperationMetadata_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_google_cloud_aiplatform_v1beta1_DeleteMemoryOperationMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_DeleteMemoryOperationMetadata_descriptor,
new java.lang.String[] {
"GenericMetadata",
});
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_descriptor,
new java.lang.String[] {
"VertexSessionSource",
"DirectContentsSource",
"DirectMemoriesSource",
"Parent",
"DisableConsolidation",
"Scope",
"Source",
});
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_VertexSessionSource_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_VertexSessionSource_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_VertexSessionSource_descriptor,
new java.lang.String[] {
"Session", "StartTime", "EndTime",
});
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectContentsSource_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_descriptor
.getNestedTypes()
.get(1);
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectContentsSource_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectContentsSource_descriptor,
new java.lang.String[] {
"Events",
});
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectContentsSource_Event_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectContentsSource_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectContentsSource_Event_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectContentsSource_Event_descriptor,
new java.lang.String[] {
"Content",
});
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectMemoriesSource_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_descriptor
.getNestedTypes()
.get(2);
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectMemoriesSource_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectMemoriesSource_descriptor,
new java.lang.String[] {
"DirectMemories",
});
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectMemoriesSource_DirectMemory_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectMemoriesSource_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectMemoriesSource_DirectMemory_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_DirectMemoriesSource_DirectMemory_descriptor,
new java.lang.String[] {
"Fact",
});
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_ScopeEntry_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_descriptor
.getNestedTypes()
.get(3);
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_ScopeEntry_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesRequest_ScopeEntry_descriptor,
new java.lang.String[] {
"Key", "Value",
});
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesResponse_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesResponse_descriptor,
new java.lang.String[] {
"GeneratedMemories",
});
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesResponse_GeneratedMemory_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesResponse_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesResponse_GeneratedMemory_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesResponse_GeneratedMemory_descriptor,
new java.lang.String[] {
"Memory", "Action",
});
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesOperationMetadata_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesOperationMetadata_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_GenerateMemoriesOperationMetadata_descriptor,
new java.lang.String[] {
"GenericMetadata",
});
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_descriptor,
new java.lang.String[] {
"SimilaritySearchParams",
"SimpleRetrievalParams",
"Parent",
"Scope",
"RetrievalParams",
});
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_SimilaritySearchParams_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_SimilaritySearchParams_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_SimilaritySearchParams_descriptor,
new java.lang.String[] {
"SearchQuery", "TopK",
});
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_SimpleRetrievalParams_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_descriptor
.getNestedTypes()
.get(1);
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_SimpleRetrievalParams_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_SimpleRetrievalParams_descriptor,
new java.lang.String[] {
"PageSize", "PageToken",
});
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_ScopeEntry_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_descriptor
.getNestedTypes()
.get(2);
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_ScopeEntry_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesRequest_ScopeEntry_descriptor,
new java.lang.String[] {
"Key", "Value",
});
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesResponse_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesResponse_descriptor,
new java.lang.String[] {
"RetrievedMemories", "NextPageToken",
});
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesResponse_RetrievedMemory_descriptor =
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesResponse_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesResponse_RetrievedMemory_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_aiplatform_v1beta1_RetrieveMemoriesResponse_RetrievedMemory_descriptor,
new java.lang.String[] {
"Memory", "Distance",
});
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.ClientProto.defaultHost);
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
registry.add(com.google.api.AnnotationsProto.http);
registry.add(com.google.api.ClientProto.methodSignature);
registry.add(com.google.api.ClientProto.oauthScopes);
registry.add(com.google.api.ResourceProto.resourceReference);
registry.add(com.google.longrunning.OperationsProto.operationInfo);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.AnnotationsProto.getDescriptor();
com.google.api.ClientProto.getDescriptor();
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.ContentProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.MemoryBankProto.getDescriptor();
com.google.cloud.aiplatform.v1beta1.OperationProto.getDescriptor();
com.google.longrunning.OperationsProto.getDescriptor();
com.google.protobuf.EmptyProto.getDescriptor();
com.google.protobuf.FieldMaskProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
|
googleapis/google-cloud-java | 38,390 | java-dialogflow-cx/google-cloud-dialogflow-cx/src/main/java/com/google/cloud/dialogflow/cx/v3/stub/HttpJsonIntentsStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.cx.v3.stub;
import static com.google.cloud.dialogflow.cx.v3.IntentsClient.ListIntentsPagedResponse;
import static com.google.cloud.dialogflow.cx.v3.IntentsClient.ListLocationsPagedResponse;
import com.google.api.HttpRule;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.httpjson.longrunning.stub.HttpJsonOperationsStub;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dialogflow.cx.v3.CreateIntentRequest;
import com.google.cloud.dialogflow.cx.v3.DeleteIntentRequest;
import com.google.cloud.dialogflow.cx.v3.ExportIntentsMetadata;
import com.google.cloud.dialogflow.cx.v3.ExportIntentsRequest;
import com.google.cloud.dialogflow.cx.v3.ExportIntentsResponse;
import com.google.cloud.dialogflow.cx.v3.GetIntentRequest;
import com.google.cloud.dialogflow.cx.v3.ImportIntentsMetadata;
import com.google.cloud.dialogflow.cx.v3.ImportIntentsRequest;
import com.google.cloud.dialogflow.cx.v3.ImportIntentsResponse;
import com.google.cloud.dialogflow.cx.v3.Intent;
import com.google.cloud.dialogflow.cx.v3.ListIntentsRequest;
import com.google.cloud.dialogflow.cx.v3.ListIntentsResponse;
import com.google.cloud.dialogflow.cx.v3.UpdateIntentRequest;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the Intents service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class HttpJsonIntentsStub extends IntentsStub {
  // Type registry used when parsing JSON responses so that packed `Any` payloads
  // (the LRO response/metadata messages registered below) can be resolved by type URL.
  private static final TypeRegistry typeRegistry =
      TypeRegistry.newBuilder()
          .add(ExportIntentsResponse.getDescriptor())
          .add(ImportIntentsResponse.getDescriptor())
          .add(ImportIntentsMetadata.getDescriptor())
          .add(ExportIntentsMetadata.getDescriptor())
          .build();
  // HTTP/JSON transcoding descriptor for ListIntents:
  // GET /v3/{parent=projects/*/locations/*/agents/*}/intents.
  // "parent" is bound into the URL path; view, language, and paging fields are query params.
  private static final ApiMethodDescriptor<ListIntentsRequest, ListIntentsResponse>
      listIntentsMethodDescriptor =
          ApiMethodDescriptor.<ListIntentsRequest, ListIntentsResponse>newBuilder()
              .setFullMethodName("google.cloud.dialogflow.cx.v3.Intents/ListIntents")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListIntentsRequest>newBuilder()
                      .setPath(
                          "/v3/{parent=projects/*/locations/*/agents/*}/intents",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListIntentsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListIntentsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(
                                fields, "intentView", request.getIntentViewValue())
                            serializer.putQueryParam(
                                fields, "languageCode", request.getLanguageCode());
                            serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                            serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            // Enums are wire-encoded as ints in the JSON payload.
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      // GET request: no HTTP body.
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ListIntentsResponse>newBuilder()
                      .setDefaultInstance(ListIntentsResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // HTTP/JSON transcoding descriptor for GetIntent:
  // GET /v3/{name=projects/*/locations/*/agents/*/intents/*}.
  private static final ApiMethodDescriptor<GetIntentRequest, Intent> getIntentMethodDescriptor =
      ApiMethodDescriptor.<GetIntentRequest, Intent>newBuilder()
          .setFullMethodName("google.cloud.dialogflow.cx.v3.Intents/GetIntent")
          .setHttpMethod("GET")
          .setType(ApiMethodDescriptor.MethodType.UNARY)
          .setRequestFormatter(
              ProtoMessageRequestFormatter.<GetIntentRequest>newBuilder()
                  .setPath(
                      "/v3/{name=projects/*/locations/*/agents/*/intents/*}",
                      request -> {
                        Map<String, String> fields = new HashMap<>();
                        ProtoRestSerializer<GetIntentRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putPathParam(fields, "name", request.getName());
                        return fields;
                      })
                  .setQueryParamsExtractor(
                      request -> {
                        Map<String, List<String>> fields = new HashMap<>();
                        ProtoRestSerializer<GetIntentRequest> serializer =
                            ProtoRestSerializer.create();
                        serializer.putQueryParam(fields, "languageCode", request.getLanguageCode());
                        serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                        return fields;
                      })
                  // GET request: no HTTP body.
                  .setRequestBodyExtractor(request -> null)
                  .build())
          .setResponseParser(
              ProtoMessageResponseParser.<Intent>newBuilder()
                  .setDefaultInstance(Intent.getDefaultInstance())
                  .setDefaultTypeRegistry(typeRegistry)
                  .build())
          .build();
  // HTTP/JSON transcoding descriptor for CreateIntent:
  // POST /v3/{parent=projects/*/locations/*/agents/*}/intents with the Intent as JSON body.
  private static final ApiMethodDescriptor<CreateIntentRequest, Intent>
      createIntentMethodDescriptor =
          ApiMethodDescriptor.<CreateIntentRequest, Intent>newBuilder()
              .setFullMethodName("google.cloud.dialogflow.cx.v3.Intents/CreateIntent")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<CreateIntentRequest>newBuilder()
                      .setPath(
                          "/v3/{parent=projects/*/locations/*/agents/*}/intents",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<CreateIntentRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<CreateIntentRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(
                                fields, "languageCode", request.getLanguageCode());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      // Only the nested Intent message is serialized into the request body.
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("intent", request.getIntent(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Intent>newBuilder()
                      .setDefaultInstance(Intent.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // HTTP/JSON transcoding descriptor for UpdateIntent:
  // PATCH /v3/{intent.name=projects/*/locations/*/agents/*/intents/*}.
  // The resource name comes from the nested Intent; updateMask travels as a query param.
  private static final ApiMethodDescriptor<UpdateIntentRequest, Intent>
      updateIntentMethodDescriptor =
          ApiMethodDescriptor.<UpdateIntentRequest, Intent>newBuilder()
              .setFullMethodName("google.cloud.dialogflow.cx.v3.Intents/UpdateIntent")
              .setHttpMethod("PATCH")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<UpdateIntentRequest>newBuilder()
                      .setPath(
                          "/v3/{intent.name=projects/*/locations/*/agents/*/intents/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateIntentRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(
                                fields, "intent.name", request.getIntent().getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateIntentRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(
                                fields, "languageCode", request.getLanguageCode());
                            serializer.putQueryParam(fields, "updateMask", request.getUpdateMask());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      // Only the nested Intent message is serialized into the request body.
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("intent", request.getIntent(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Intent>newBuilder()
                      .setDefaultInstance(Intent.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // HTTP/JSON transcoding descriptor for DeleteIntent:
  // DELETE /v3/{name=projects/*/locations/*/agents/*/intents/*}; returns Empty.
  private static final ApiMethodDescriptor<DeleteIntentRequest, Empty>
      deleteIntentMethodDescriptor =
          ApiMethodDescriptor.<DeleteIntentRequest, Empty>newBuilder()
              .setFullMethodName("google.cloud.dialogflow.cx.v3.Intents/DeleteIntent")
              .setHttpMethod("DELETE")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<DeleteIntentRequest>newBuilder()
                      .setPath(
                          "/v3/{name=projects/*/locations/*/agents/*/intents/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteIntentRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteIntentRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      // DELETE request: no HTTP body.
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Empty>newBuilder()
                      .setDefaultInstance(Empty.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // HTTP/JSON transcoding descriptor for ImportIntents (long-running):
  // POST /v3/{parent=projects/*/locations/*/agents/*}/intents:import.
  // Returns a google.longrunning.Operation, wrapped into an HttpJsonOperationSnapshot.
  private static final ApiMethodDescriptor<ImportIntentsRequest, Operation>
      importIntentsMethodDescriptor =
          ApiMethodDescriptor.<ImportIntentsRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.dialogflow.cx.v3.Intents/ImportIntents")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ImportIntentsRequest>newBuilder()
                      .setPath(
                          "/v3/{parent=projects/*/locations/*/agents/*}/intents:import",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ImportIntentsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ImportIntentsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      // Body carries the whole request ("*") minus "parent",
                      // which is already bound into the URL path.
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("*", request.toBuilder().clearParent().build(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (ImportIntentsRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();
  // HTTP/JSON transcoding descriptor for ExportIntents (long-running):
  // POST /v3/{parent=projects/*/locations/*/agents/*}/intents:export.
  // Returns a google.longrunning.Operation, wrapped into an HttpJsonOperationSnapshot.
  private static final ApiMethodDescriptor<ExportIntentsRequest, Operation>
      exportIntentsMethodDescriptor =
          ApiMethodDescriptor.<ExportIntentsRequest, Operation>newBuilder()
              .setFullMethodName("google.cloud.dialogflow.cx.v3.Intents/ExportIntents")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ExportIntentsRequest>newBuilder()
                      .setPath(
                          "/v3/{parent=projects/*/locations/*/agents/*}/intents:export",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ExportIntentsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ExportIntentsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      // Body carries the whole request ("*") minus "parent",
                      // which is already bound into the URL path.
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("*", request.toBuilder().clearParent().build(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (ExportIntentsRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();
  // HTTP/JSON transcoding descriptor for the mixin google.cloud.location.Locations/ListLocations:
  // GET /v3/{name=projects/*}/locations.
  private static final ApiMethodDescriptor<ListLocationsRequest, ListLocationsResponse>
      listLocationsMethodDescriptor =
          ApiMethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder()
              .setFullMethodName("google.cloud.location.Locations/ListLocations")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListLocationsRequest>newBuilder()
                      .setPath(
                          "/v3/{name=projects/*}/locations",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListLocationsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListLocationsRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      // GET request: no HTTP body.
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ListLocationsResponse>newBuilder()
                      .setDefaultInstance(ListLocationsResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // HTTP/JSON transcoding descriptor for the mixin google.cloud.location.Locations/GetLocation:
  // GET /v3/{name=projects/*/locations/*}.
  private static final ApiMethodDescriptor<GetLocationRequest, Location>
      getLocationMethodDescriptor =
          ApiMethodDescriptor.<GetLocationRequest, Location>newBuilder()
              .setFullMethodName("google.cloud.location.Locations/GetLocation")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<GetLocationRequest>newBuilder()
                      .setPath(
                          "/v3/{name=projects/*/locations/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<GetLocationRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<GetLocationRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      // GET request: no HTTP body.
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Location>newBuilder()
                      .setDefaultInstance(Location.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // Per-RPC callables, wired up in the constructor from the descriptors above.
  private final UnaryCallable<ListIntentsRequest, ListIntentsResponse> listIntentsCallable;
  private final UnaryCallable<ListIntentsRequest, ListIntentsPagedResponse>
      listIntentsPagedCallable;
  private final UnaryCallable<GetIntentRequest, Intent> getIntentCallable;
  private final UnaryCallable<CreateIntentRequest, Intent> createIntentCallable;
  private final UnaryCallable<UpdateIntentRequest, Intent> updateIntentCallable;
  private final UnaryCallable<DeleteIntentRequest, Empty> deleteIntentCallable;
  // Long-running RPCs expose both a raw Operation callable and a typed OperationCallable.
  private final UnaryCallable<ImportIntentsRequest, Operation> importIntentsCallable;
  private final OperationCallable<
          ImportIntentsRequest, ImportIntentsResponse, ImportIntentsMetadata>
      importIntentsOperationCallable;
  private final UnaryCallable<ExportIntentsRequest, Operation> exportIntentsCallable;
  private final OperationCallable<
          ExportIntentsRequest, ExportIntentsResponse, ExportIntentsMetadata>
      exportIntentsOperationCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable;
  private final UnaryCallable<GetLocationRequest, Location> getLocationCallable;
  // Aggregated resources for shutdown, the operations stub, and the callable factory.
  private final BackgroundResource backgroundResources;
  private final HttpJsonOperationsStub httpJsonOperationsStub;
  private final HttpJsonStubCallableFactory callableFactory;
public static final HttpJsonIntentsStub create(IntentsStubSettings settings) throws IOException {
return new HttpJsonIntentsStub(settings, ClientContext.create(settings));
}
public static final HttpJsonIntentsStub create(ClientContext clientContext) throws IOException {
return new HttpJsonIntentsStub(IntentsStubSettings.newHttpJsonBuilder().build(), clientContext);
}
public static final HttpJsonIntentsStub create(
ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
return new HttpJsonIntentsStub(
IntentsStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory);
}
  /**
   * Constructs an instance of HttpJsonIntentsStub, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   *
   * @param settings the stub settings to use
   * @param clientContext the context supplying credentials, transport, and executor
   * @throws IOException if stub construction fails
   */
  protected HttpJsonIntentsStub(IntentsStubSettings settings, ClientContext clientContext)
      throws IOException {
    // Delegates to the main constructor with the default HTTP/JSON callable factory.
    this(settings, clientContext, new HttpJsonIntentsCallableFactory());
  }
/**
* Constructs an instance of HttpJsonIntentsStub, using the given settings. This is protected so
* that it is easy to make a subclass, but otherwise, the static factory methods should be
* preferred.
*
* @param settings the stub settings applied to each generated callable
* @param clientContext context supplying credentials, executor and transport
* @param callableFactory factory that wraps transport settings into gax callables
* @throws IOException if the operations stub or any callable cannot be created
*/
protected HttpJsonIntentsStub(
IntentsStubSettings settings,
ClientContext clientContext,
HttpJsonStubCallableFactory callableFactory)
throws IOException {
this.callableFactory = callableFactory;
// Long-running operations stub; the HttpRule map below supplies the HTTP bindings
// for cancel/get/list in both the global and per-location operation name forms.
this.httpJsonOperationsStub =
HttpJsonOperationsStub.create(
clientContext,
callableFactory,
typeRegistry,
ImmutableMap.<String, HttpRule>builder()
.put(
"google.longrunning.Operations.CancelOperation",
HttpRule.newBuilder()
.setPost("/v3/{name=projects/*/operations/*}:cancel")
.addAdditionalBindings(
HttpRule.newBuilder()
.setPost("/v3/{name=projects/*/locations/*/operations/*}:cancel")
.build())
.build())
.put(
"google.longrunning.Operations.GetOperation",
HttpRule.newBuilder()
.setGet("/v3/{name=projects/*/operations/*}")
.addAdditionalBindings(
HttpRule.newBuilder()
.setGet("/v3/{name=projects/*/locations/*/operations/*}")
.build())
.build())
.put(
"google.longrunning.Operations.ListOperations",
HttpRule.newBuilder()
.setGet("/v3/{name=projects/*}/operations")
.addAdditionalBindings(
HttpRule.newBuilder()
.setGet("/v3/{name=projects/*/locations/*}/operations")
.build())
.build())
.build());
// Per-RPC transport settings. Each params extractor copies request fields
// (e.g. "parent", "name") into implicit routing headers for the request.
HttpJsonCallSettings<ListIntentsRequest, ListIntentsResponse> listIntentsTransportSettings =
HttpJsonCallSettings.<ListIntentsRequest, ListIntentsResponse>newBuilder()
.setMethodDescriptor(listIntentsMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("parent", String.valueOf(request.getParent()));
return builder.build();
})
.build();
HttpJsonCallSettings<GetIntentRequest, Intent> getIntentTransportSettings =
HttpJsonCallSettings.<GetIntentRequest, Intent>newBuilder()
.setMethodDescriptor(getIntentMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<CreateIntentRequest, Intent> createIntentTransportSettings =
HttpJsonCallSettings.<CreateIntentRequest, Intent>newBuilder()
.setMethodDescriptor(createIntentMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("parent", String.valueOf(request.getParent()));
return builder.build();
})
.build();
HttpJsonCallSettings<UpdateIntentRequest, Intent> updateIntentTransportSettings =
HttpJsonCallSettings.<UpdateIntentRequest, Intent>newBuilder()
.setMethodDescriptor(updateIntentMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
// routing header comes from the nested resource name, not a top-level field
builder.add("intent.name", String.valueOf(request.getIntent().getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<DeleteIntentRequest, Empty> deleteIntentTransportSettings =
HttpJsonCallSettings.<DeleteIntentRequest, Empty>newBuilder()
.setMethodDescriptor(deleteIntentMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<ImportIntentsRequest, Operation> importIntentsTransportSettings =
HttpJsonCallSettings.<ImportIntentsRequest, Operation>newBuilder()
.setMethodDescriptor(importIntentsMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("parent", String.valueOf(request.getParent()));
return builder.build();
})
.build();
HttpJsonCallSettings<ExportIntentsRequest, Operation> exportIntentsTransportSettings =
HttpJsonCallSettings.<ExportIntentsRequest, Operation>newBuilder()
.setMethodDescriptor(exportIntentsMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("parent", String.valueOf(request.getParent()));
return builder.build();
})
.build();
HttpJsonCallSettings<ListLocationsRequest, ListLocationsResponse>
listLocationsTransportSettings =
HttpJsonCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder()
.setMethodDescriptor(listLocationsMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<GetLocationRequest, Location> getLocationTransportSettings =
HttpJsonCallSettings.<GetLocationRequest, Location>newBuilder()
.setMethodDescriptor(getLocationMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
// Combine each transport settings object with the user-configured call settings
// (retries, timeouts, paging) to build the final callables.
this.listIntentsCallable =
callableFactory.createUnaryCallable(
listIntentsTransportSettings, settings.listIntentsSettings(), clientContext);
this.listIntentsPagedCallable =
callableFactory.createPagedCallable(
listIntentsTransportSettings, settings.listIntentsSettings(), clientContext);
this.getIntentCallable =
callableFactory.createUnaryCallable(
getIntentTransportSettings, settings.getIntentSettings(), clientContext);
this.createIntentCallable =
callableFactory.createUnaryCallable(
createIntentTransportSettings, settings.createIntentSettings(), clientContext);
this.updateIntentCallable =
callableFactory.createUnaryCallable(
updateIntentTransportSettings, settings.updateIntentSettings(), clientContext);
this.deleteIntentCallable =
callableFactory.createUnaryCallable(
deleteIntentTransportSettings, settings.deleteIntentSettings(), clientContext);
this.importIntentsCallable =
callableFactory.createUnaryCallable(
importIntentsTransportSettings, settings.importIntentsSettings(), clientContext);
this.importIntentsOperationCallable =
callableFactory.createOperationCallable(
importIntentsTransportSettings,
settings.importIntentsOperationSettings(),
clientContext,
httpJsonOperationsStub);
this.exportIntentsCallable =
callableFactory.createUnaryCallable(
exportIntentsTransportSettings, settings.exportIntentsSettings(), clientContext);
this.exportIntentsOperationCallable =
callableFactory.createOperationCallable(
exportIntentsTransportSettings,
settings.exportIntentsOperationSettings(),
clientContext,
httpJsonOperationsStub);
this.listLocationsCallable =
callableFactory.createUnaryCallable(
listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
this.listLocationsPagedCallable =
callableFactory.createPagedCallable(
listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
this.getLocationCallable =
callableFactory.createUnaryCallable(
getLocationTransportSettings, settings.getLocationSettings(), clientContext);
// Track everything that must be released when close() is called.
this.backgroundResources =
new BackgroundResourceAggregation(clientContext.getBackgroundResources());
}
/** Returns the RPC method descriptors for this stub, in declaration order. */
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
  // Callers may mutate the returned list, so hand out a fresh ArrayList each time.
  List<ApiMethodDescriptor> descriptors = new ArrayList<>();
  descriptors.add(listIntentsMethodDescriptor);
  descriptors.add(getIntentMethodDescriptor);
  descriptors.add(createIntentMethodDescriptor);
  descriptors.add(updateIntentMethodDescriptor);
  descriptors.add(deleteIntentMethodDescriptor);
  descriptors.add(importIntentsMethodDescriptor);
  descriptors.add(exportIntentsMethodDescriptor);
  descriptors.add(listLocationsMethodDescriptor);
  descriptors.add(getLocationMethodDescriptor);
  return descriptors;
}
/** Returns the operations stub used to poll and manage long-running operations. */
public HttpJsonOperationsStub getHttpJsonOperationsStub() {
return httpJsonOperationsStub;
}
@Override
public UnaryCallable<ListIntentsRequest, ListIntentsResponse> listIntentsCallable() {
// accessor for the ListIntents callable built in the constructor
return listIntentsCallable;
}
@Override
public UnaryCallable<ListIntentsRequest, ListIntentsPagedResponse> listIntentsPagedCallable() {
// page-iterating variant of listIntentsCallable
return listIntentsPagedCallable;
}
@Override
public UnaryCallable<GetIntentRequest, Intent> getIntentCallable() {
// accessor for the GetIntent callable built in the constructor
return getIntentCallable;
}
@Override
public UnaryCallable<CreateIntentRequest, Intent> createIntentCallable() {
// accessor for the CreateIntent callable built in the constructor
return createIntentCallable;
}
@Override
public UnaryCallable<UpdateIntentRequest, Intent> updateIntentCallable() {
// accessor for the UpdateIntent callable built in the constructor
return updateIntentCallable;
}
@Override
public UnaryCallable<DeleteIntentRequest, Empty> deleteIntentCallable() {
// accessor for the DeleteIntent callable built in the constructor
return deleteIntentCallable;
}
@Override
public UnaryCallable<ImportIntentsRequest, Operation> importIntentsCallable() {
// raw Operation-returning form of ImportIntents
return importIntentsCallable;
}
@Override
public OperationCallable<ImportIntentsRequest, ImportIntentsResponse, ImportIntentsMetadata>
importIntentsOperationCallable() {
// LRO form of ImportIntents, resolved through the operations stub
return importIntentsOperationCallable;
}
@Override
public UnaryCallable<ExportIntentsRequest, Operation> exportIntentsCallable() {
// raw Operation-returning form of ExportIntents
return exportIntentsCallable;
}
@Override
public OperationCallable<ExportIntentsRequest, ExportIntentsResponse, ExportIntentsMetadata>
exportIntentsOperationCallable() {
// LRO form of ExportIntents, resolved through the operations stub
return exportIntentsOperationCallable;
}
@Override
public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
// accessor for the mixed-in Locations ListLocations callable
return listLocationsCallable;
}
@Override
public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
listLocationsPagedCallable() {
// page-iterating variant of listLocationsCallable
return listLocationsPagedCallable;
}
@Override
public UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
// accessor for the mixed-in Locations GetLocation callable
return getLocationCallable;
}
@Override
public final void close() {
try {
backgroundResources.close();
} catch (RuntimeException e) {
// propagate unchecked failures as-is
throw e;
} catch (Exception e) {
// wrap checked close failures so callers need not declare them
throw new IllegalStateException("Failed to close resource", e);
}
}
@Override
public void shutdown() {
// delegate lifecycle to the aggregated background resources
backgroundResources.shutdown();
}
@Override
public boolean isShutdown() {
// delegate lifecycle query to the aggregated background resources
return backgroundResources.isShutdown();
}
@Override
public boolean isTerminated() {
// delegate lifecycle query to the aggregated background resources
return backgroundResources.isTerminated();
}
@Override
public void shutdownNow() {
// immediate shutdown, delegated to the aggregated background resources
backgroundResources.shutdownNow();
}
@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
// block up to the given duration for all background resources to terminate
return backgroundResources.awaitTermination(duration, unit);
}
}
|
oracle/coherence | 38,014 | prj/test/unit/coherence-tests/src/test/java/com/tangosol/io/pof/reflect/PofValueTest.java | /*
* Copyright (c) 2000, 2022, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* http://oss.oracle.com/licenses/upl.
*/
package com.tangosol.io.pof.reflect;
import com.tangosol.io.pof.PofConstants;
import com.tangosol.io.pof.PofContext;
import com.tangosol.io.pof.PortableObject;
import com.tangosol.util.Binary;
import com.tangosol.util.CompositeKey;
import com.tangosol.util.LongArray;
import data.pof.ObjectWithAllTypes;
import data.pof.PofDataUtils;
import data.pof.PortablePerson;
import data.pof.PortablePersonReference;
import data.pof.TestValue;
import org.junit.Test;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Date;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.*;
/**
* Tests for the PofValue class.
*
* @author as 2009.01.31
*/
public class PofValueTest
extends PofDataUtils
{
// ----- test methods ---------------------------------------------------
/**
* Test parsing via {@link PofValueParser#parse(Binary, PofContext)} and basic
* type-id / value access on the resulting root PofValue.
*/
@Test
public void testPofValueInitialization()
throws IOException
{
PortablePerson person = PortablePerson.create();
// run once with POF object references disabled, then once enabled
for (boolean fRefEnabled : new boolean[] {false, true})
{
Binary binPerson = serialize(person, MODE_PLAIN, fRefEnabled);
PofValue root = PofValueParser.parse(binPerson, getPofContext(fRefEnabled));
assertEquals(root.getTypeId(), 2);
assertEquals(root.getValue(), person);
}
}
/**
* Test access to POF values using plain Binary.
*/
@Test
public void testPofValueAccessorWithPlainBinary()
throws IOException
{
PortablePerson person = PortablePerson.create();
Binary binPerson = serialize(person, MODE_PLAIN);
testPofValueAccessor(person, binPerson, false);
// Test the case where we try to access an object that contains a
// uniform collection of user defined objects when object reference
// is enabled.
try
{
binPerson = serialize(person, MODE_PLAIN, true);
testPofValueAccessor(person, binPerson, true);
fail("Should've thrown UnsupportedOperationException.");
}
catch (UnsupportedOperationException e)
{
// expected: navigation of uniform collections of user types is not
// supported when POF object references are enabled
}
}
/**
* Test access to POF values using FMT_EXT Binary.
*/
@Test
public void testPofValueAccessorWithFmtExtBinary()
throws IOException
{
// run once with POF object references disabled, then once enabled;
// with references enabled the children list (a uniform collection of
// user types) cannot be navigated, so use a childless person
for (boolean fRef : new boolean[] {false, true})
{
PortablePerson person = fRef
? PortablePerson.createNoChildren()
: PortablePerson.create();
testPofValueAccessor(person, serialize(person, MODE_FMT_EXT, fRef), fRef);
}
}
/**
* Test access to POF values using int-decorated Binary.
*/
@Test
public void testPofValueAccessorWithIntegerDecoratedObject()
throws IOException
{
// run once with POF object references disabled, then once enabled
for (boolean fRef : new boolean[] {false, true})
{
PortablePerson person = fRef
? PortablePerson.createNoChildren()
: PortablePerson.create();
testPofValueAccessor(person, serialize(person, MODE_FMT_IDO, fRef), fRef);
}
}
/**
* Test access to POF values using decorated Binary.
*/
@Test
public void testPofValueAccessorWithDecoratedBinary()
throws IOException
{
// run once with POF object references disabled, then once enabled
for (boolean fRef : new boolean[] {false, true})
{
PortablePerson person = fRef
? PortablePerson.createNoChildren()
: PortablePerson.create();
testPofValueAccessor(person, serialize(person, MODE_FMT_DECO, fRef), fRef);
}
}
/**
* Assertions used by all testPofValueAccessor* methods.
*
* @param person object to test
* @param binPerson binary serialized object
* @param fRefEnabled flag to indicate if object identity/reference is enabled
*/
public void testPofValueAccessor(PortablePerson person, Binary binPerson, boolean fRefEnabled)
throws IOException
{
PofValue pv = PofValueParser.parse(binPerson, getPofContext(fRefEnabled));
// child indexes match PortablePerson's POF property order:
// 0 = name, 1 = address, 2 = date of birth
assertEquals(pv.getChild(1).getValue(), person.getAddress());
assertEquals(pv.getChild(0).getValue(), person.m_sName);
assertEquals(((Date) pv.getChild(2).getValue()).getTime(),
person.m_dtDOB.getTime());
// test NilPofValue: accessing a property index beyond the serialized
// properties yields a non-null value whose getValue() is null
PofValue nv = pv.getChild(100);
assertNotNull(nv);
assertNull(nv.getValue());
// test PofNavigationException: child 0 is a String, which has no children
try
{
pv.getChild(0).getChild(0);
fail("Should've thrown PofNavigationException");
}
catch (PofNavigationException ignore) {}
}
/**
* Test access to nested POF values (the Address nested within a person).
*/
@Test
public void testNestedPofValueAccessor()
throws IOException
{
PortablePerson person = PortablePerson.create();
// run once with POF object references disabled, then once enabled
for (boolean fRef : new boolean[] {false, true})
{
Binary bin = serialize(person, MODE_PLAIN, fRef);
PofValue root = PofValueParser.parse(bin, getPofContext(fRef));
// address is property 1; its own properties are street/city/state/zip
assertEquals(root.getChild(1).getChild(0).getValue(),
person.getAddress().m_sStreet);
assertEquals(root.getChild(1).getChild(1).getValue(),
person.getAddress().m_sCity);
assertEquals(root.getChild(1).getChild(2).getValue(),
person.getAddress().m_sState);
assertEquals(root.getChild(1).getChild(3).getValue(),
person.getAddress().m_sZip);
}
}
/**
* Test POF value mutation with plain Binary.
*/
@Test
public void testPofValueMutatorWithPlainBinary()
throws IOException
{
PortablePerson p = PortablePerson.create();
testPofValueMutator(p, serialize(p, MODE_PLAIN));
}
/**
* Test POF value mutation with FMT_EXT Binary.
*/
@Test
public void testPofValueMutatorWithFmtExtBinary()
throws IOException
{
PortablePerson p = PortablePerson.create();
testPofValueMutator(p, serialize(p, MODE_FMT_EXT));
}
/**
* Test POF value mutation with int-decorated Binary.
*/
@Test
public void testPofValueMutatorWithIntDecoratedObject()
throws IOException
{
PortablePerson p = PortablePerson.create();
testPofValueMutator(p, serialize(p, MODE_FMT_IDO));
}
/**
* Test POF value mutation with decorated Binary.
*/
@Test
public void testPofValueMutatorWithDecoratedBinary()
throws IOException
{
PortablePerson p = PortablePerson.create();
testPofValueMutator(p, serialize(p, MODE_FMT_DECO));
}
/**
* Assertions used by all testPofValueMutator* methods.
*
* @param p object to test
* @param binPerson binary serialized object
*/
public void testPofValueMutator(PortablePerson p, Binary binPerson)
throws IOException
{
PofValue pv = PofValueParser.parse(binPerson, getPofContext());
// applyChanges() with no modifications must round-trip the original binary
Binary binUnmodified = pv.applyChanges();
assertEquals(binUnmodified, binPerson);
// a second setValue() on the same child overrides the first
pv.getChild(0).setValue("Seovic Aleksandar");
assertEquals(pv.getChild(0).getValue(), "Seovic Aleksandar");
pv.getChild(0).setValue("Marija Seovic");
pv.getChild(1).getChild(0).setValue("456 Main St");
pv.getChild(1).getChild(1).setValue("Lutz");
pv.getChild(1).getChild(3).setValue("33549");
// deprecated Date(int, int, int) used intentionally for fixed test dates
pv.getChild(2).setValue(new Date(78, 1, 20));
pv.getChild(3).setValue(new PortablePerson("Aleksandar Seovic", new Date(74, 7, 24)));
pv.getChild(4).setValue(p.getChildren());
binPerson = pv.applyChanges();
PortablePerson p2 = (PortablePerson) deserialize(binPerson);
assertEquals(p2.m_sName, "Marija Seovic");
assertEquals(p2.getAddress().m_sStreet, "456 Main St");
assertEquals(p2.getAddress().m_sCity, "Lutz");
assertEquals(p2.getAddress().m_sZip, "33549");
assertEquals(p2.m_dtDOB, new Date(78, 1, 20));
assertEquals(p2.getSpouse().m_sName, "Aleksandar Seovic");
assertEquals(p2.getChildren(), p.getChildren());
// re-parse the mutated binary and verify properties can be nulled out
pv = PofValueParser.parse(binPerson, getPofContext());
pv.getChild(0).setValue("Ana Maria Seovic");
pv.getChild(2).setValue(new Date(104, 7, 14));
pv.getChild(3).setValue(null);
pv.getChild(4).setValue(null);
binPerson = pv.applyChanges();
PortablePerson p3 = (PortablePerson) deserialize(binPerson);
assertEquals(p3.m_sName, "Ana Maria Seovic");
// untouched children (address) must survive the second mutation
assertEquals(p3.getAddress(), p2.getAddress());
assertEquals(p3.m_dtDOB, new Date(104, 7, 14));
assertNull(p3.getSpouse());
assertNull(p3.getChildren());
}
/**
* Test PofArray.
*/
@Test
public void testPofArray()
throws IOException
{
// perform the test twice, once with references disabled, once with them enabled
for (boolean fRefEnabled = false; ; )
{
TestValue tv = TestValue.create(fRefEnabled);
Binary bin = serialize(tv, MODE_FMT_EXT, fRefEnabled);
PofValue root = PofValueParser.parse(bin, getPofContext(fRefEnabled));
// child 0 of TestValue is the heterogeneous Object[] (m_oArray)
PofValue pv = root.getChild(0);
assertEquals(((PofArray) pv).getLength(), 4);
assertEquals(pv.getChild(0).getValue(), Integer.valueOf(1));
assertEquals(pv.getChild(1).getValue(), "two");
assertEquals(pv.getChild(2).getValue(), fRefEnabled ? PortablePerson.createNoChildren() : PortablePerson.create());
assertEquals(pv.getChild(3).getValue(), new Binary(new byte[]{22, 23, 24}));
try
{
pv.getChild(100);
fail("Should've thrown IndexOutOfBoundsException.");
}
catch (IndexOutOfBoundsException ignore)
{
}
// mutation is only exercised with references disabled; with them
// enabled we stop here
if (fRefEnabled)
{
break;
}
pv.getChild(1).setValue("dva");
pv.getChild(2).getChild(0).setValue("Novak");
Binary binModified = root.applyChanges();
TestValue tvModified = (TestValue) deserialize(binModified);
assertEquals(tvModified.m_oArray[1], "dva");
assertEquals(((PortablePerson) tvModified.m_oArray[2]).m_sName,
"Novak");
fRefEnabled = true;
}
}
/**
* Test PofUniformArray.
*/
@Test
public void testPofUniformArray()
throws IOException
{
// perform the test twice, once with references disabled, once with them enabled
for (boolean fRefEnabled = false; ; )
{
TestValue tv = TestValue.create(fRefEnabled);
Binary bin = serialize(tv, MODE_FMT_EXT, fRefEnabled);
PofValue root = PofValueParser.parse(bin, getPofContext(fRefEnabled));
// child 1 of TestValue is the uniform String[] (m_sArray)
PofValue pv = root.getChild(1);
assertEquals(((PofArray) pv).getLength(), 4);
assertEquals(pv.getChild(0).getValue(), "one");
assertEquals(pv.getChild(1).getValue(), "two");
assertEquals(pv.getChild(2).getValue(), "three");
assertEquals(pv.getChild(3).getValue(), "four");
try
{
pv.getChild(100);
fail("Should've thrown IndexOutOfBoundsException.");
}
catch (IndexOutOfBoundsException ignore)
{}
if (fRefEnabled)
{
// applyChanges() is unsupported when object references are enabled
try
{
root.applyChanges();
fail("Should've thrown UnsupportedOperationException.");
}
catch (UnsupportedOperationException e)
{
}
}
else
{
pv.getChild(0).setValue("jedan");
pv.getChild(3).setValue("cetiri");
Binary binModified = root.applyChanges();
TestValue tvModified = (TestValue) deserialize(binModified);
assertEquals(tvModified.m_sArray[0], "jedan");
assertEquals(tvModified.m_sArray[3], "cetiri");
}
if (fRefEnabled)
{
break;
}
fRefEnabled = true;
}
}
/**
* Test PofCollection.
*/
@Test
public void testPofCollection()
throws IOException
{
// perform the test twice, once with references disabled, once with them enabled
for (boolean fRefEnabled = false; ; )
{
TestValue tv = TestValue.create(fRefEnabled);
Binary bin = serialize(tv, MODE_FMT_EXT, fRefEnabled);
PofValue root = PofValueParser.parse(bin, getPofContext(fRefEnabled));
PofValue pv = root.getChild(0);
// read child 0 fully up front; with references enabled this
// NOTE(review): presumably registers the identity of the nested person
// so it can be resolved by reference later — confirm against
// PofValueParser reference-handling (o itself is otherwise unused)
Object o = pv.getValue();
if (fRefEnabled)
{
pv = pv.getChild(2);
}
// child 2 of TestValue is the heterogeneous collection (m_col)
pv = root.getChild(2);
assertEquals(((PofCollection) pv).getLength(), 4);
assertEquals(pv.getChild(0).getValue(), Integer.valueOf(1));
assertEquals(pv.getChild(1).getValue(), "two");
Object person = pv.getChild(2).getValue();
assertEquals(person, fRefEnabled ? PortablePerson.createNoChildren() : PortablePerson.create());
try
{
pv.getChild(100);
fail("Should've thrown IndexOutOfBoundsException.");
}
catch (IndexOutOfBoundsException ignore)
{}
// the mutation paths below only run with references disabled
if (fRefEnabled)
{
break;
}
pv.getChild(1).setValue("dva");
pv.getChild(2).getChild(0).setValue("Novak");
assertEquals(pv.getChild(3).getValue(), new Binary(new byte[]{22, 23, 24}));
Binary binModified = root.applyChanges();
TestValue tvModified = (TestValue) deserialize(binModified);
assertEquals(((ArrayList) tvModified.m_col).get(1), "dva");
assertEquals(((PortablePerson) ((ArrayList) tvModified.m_col).get(2)).m_sName,
"Novak");
// re-parse and verify the person in the array and the person in the
// collection expose the same address value
root = PofValueParser.parse(bin, getPofContext());
pv = root.getChild(0);
person = pv.getChild(2).getValue();
PofValue pv2 = root.getChild(2);
Object address = pv2.getChild(2).getChild(1).getValue();
assertEquals(address, ((PortablePerson) person).getAddress());
// re-parse once more and mutate only the collection's person
root = PofValueParser.parse(bin, getPofContext());
pv = root.getChild(2);
pv.getChild(2).getChild(0).setValue("John Smith");
binModified = root.applyChanges();
tvModified = (TestValue) deserialize(binModified);
assertEquals(((PortablePerson) ((ArrayList) tvModified.m_col).get(2)).m_sName,
"John Smith");
fRefEnabled = true;
}
}
/**
* Test PofUniformCollection.
*/
@Test
public void testPofUniformCollection()
throws IOException
{
// perform the test twice, once with references disabled, once with them enabled
for (boolean fRefEnabled = false; ; )
{
TestValue tv = TestValue.create(fRefEnabled);
Binary bin = serialize(tv, MODE_FMT_EXT, fRefEnabled);
PofValue root = PofValueParser.parse(bin, getPofContext(fRefEnabled));
// child 3 of TestValue is the uniform String collection (m_colUniform)
PofValue pv = root.getChild(3);
assertEquals(((PofArray) pv).getLength(), 4);
assertEquals(pv.getChild(0).getValue(), "one");
assertEquals(pv.getChild(1).getValue(), "two");
assertEquals(pv.getChild(2).getValue(), "three");
assertEquals(pv.getChild(3).getValue(), "four");
try
{
pv.getChild(100);
fail("Should've thrown IndexOutOfBoundsException.");
}
catch (IndexOutOfBoundsException ignore)
{}
// mutation only runs with references disabled
if (fRefEnabled)
{
break;
}
pv.getChild(0).setValue("jedan");
pv.getChild(3).setValue("cetiri");
Binary binModified = root.applyChanges();
TestValue tvModified = (TestValue) deserialize(binModified);
ArrayList sList = (ArrayList) tvModified.m_colUniform;
assertEquals(sList.get(0), "jedan");
assertEquals(sList.get(3), "cetiri");
fRefEnabled = true;
}
}
/**
* Test PofSparseArray.
*/
@Test
public void testPofSparseArray()
throws IOException
{
// perform the test twice, once with references disabled, once with them enabled
for (boolean fRefEnabled = false; ; )
{
TestValue tv = TestValue.create(fRefEnabled);
Binary bin = serialize(tv, MODE_FMT_EXT, fRefEnabled);
PofValue root = PofValueParser.parse(bin, getPofContext(fRefEnabled));
// child 4 of TestValue is the sparse array (m_sparseArray)
PofValue pv = root.getChild(4);
// Test the case where we try to read a reference id before the object is read.
// We should get an
// IOException: missing identity: 2
// Work around by reading the person object in root.getChild(0) where it first appears
// so that root.getChild(4).getValue(), which references the person object will have it.
if (fRefEnabled)
{
try
{
System.out.println(pv.getValue());
fail("Should've thrown Exception.");
}
catch (Exception e)
{
// expected: the referenced identity has not been read yet
}
// read the person where it first appears so its identity is registered
root.getChild(0).getChild(2).getValue();
}
System.out.println(pv.getValue());
assertTrue(pv instanceof PofSparseArray);
assertEquals(pv.getChild(4).getValue(), Integer.valueOf(4));
assertEquals(pv.getChild(2).getValue(), "two");
assertEquals(pv.getChild(5).getValue(), fRefEnabled ? PortablePerson.createNoChildren() : PortablePerson.create());
// mutation only runs with references disabled
if (fRefEnabled)
{
break;
}
pv.getChild(1).setValue(Integer.valueOf(1));
pv.getChild(2).setValue("dva");
pv.getChild(3).setValue("tri");
pv.getChild(5).getChild(0).setValue("Novak");
Binary binModified = root.applyChanges();
TestValue tvModified = (TestValue) deserialize(binModified);
System.out.println("m_oArray:Person " + tvModified.m_oArray[2]);
System.out.println(tvModified.m_sparseArray);
// not using reference, modification of person in m_sparseArray
// does not affect the person in m_oArray.
assertEquals(((PortablePerson) tvModified.m_oArray[2]).m_sName,
"Aleksandar Seovic");
assertEquals(tvModified.m_sparseArray.get(1), Integer.valueOf(1));
assertEquals(tvModified.m_sparseArray.get(2), "dva");
assertEquals(tvModified.m_sparseArray.get(3), "tri");
assertEquals(((PortablePerson) tvModified.m_sparseArray.get(5)).m_sName,
"Novak");
fRefEnabled = true;
}
}
/**
* Test PofUniformSparseArray.
*/
@Test
public void testPofUniformSparseArray()
throws IOException
{
// perform the test twice, once with references disabled, once with them enabled
for (boolean fRefEnabled = false; ; )
{
TestValue tv = TestValue.create(fRefEnabled);
Binary bin = serialize(tv, MODE_FMT_EXT, fRefEnabled);
PofValue root = PofValueParser.parse(bin, getPofContext(fRefEnabled));
// child 5 of TestValue is the uniform sparse array (m_uniformSparseArray)
PofValue pv = root.getChild(5);
System.out.println(pv.getValue());
assertTrue(pv instanceof PofUniformSparseArray);
assertEquals(pv.getChild(2).getValue(), "two");
assertEquals(pv.getChild(4).getValue(), "four");
// mutation only runs with references disabled
if (fRefEnabled)
{
break;
}
pv.getChild(1).setValue("jedan");
pv.getChild(3).setValue("tri");
pv.getChild(4).setValue("cetiri");
pv.getChild(5).setValue("pet");
Binary binModified = root.applyChanges();
TestValue tvModified = (TestValue) deserialize(binModified);
LongArray arr = tvModified.m_uniformSparseArray;
System.out.println(arr);
// untouched index 2 keeps its original value after applyChanges()
assertEquals(arr.get(1), "jedan");
assertEquals(arr.get(2), "two");
assertEquals(arr.get(3), "tri");
assertEquals(arr.get(4), "cetiri");
assertEquals(arr.get(5), "pet");
fRefEnabled = true;
}
}
/**
* Test getBooleanArray().
*/
@Test
public void testGetBooleanArray()
throws IOException
{
boolean[] afExpect = {true, false, true, false, true, false};
PofValue pv = PofValueParser.parse(serialize(afExpect, MODE_PLAIN), getPofContext());
boolean[] afActual = pv.getBooleanArray();
assertEquals(afActual.length, afExpect.length);
for (int i = 0, c = afExpect.length; i < c; i++)
{
assertTrue(afActual[i] == afExpect[i]);
}
// requesting an incompatible representation must fail
try
{
pv.getObjectArray();
fail("Should've thrown RuntimeException.");
}
catch (RuntimeException ignore) {}
}
/**
* Test getByteArray().
*/
@Test
public void testGetByteArray()
throws IOException
{
byte[] abExpect = {0, -1, 2, -3, 4, -5};
PofValue pv = PofValueParser.parse(serialize(abExpect, MODE_PLAIN), getPofContext());
byte[] abActual = pv.getByteArray();
assertEquals(abActual.length, abExpect.length);
for (int i = 0, c = abExpect.length; i < c; i++)
{
assertTrue(abActual[i] == abExpect[i]);
}
// requesting an incompatible representation must fail
try
{
pv.getCharArray();
fail("Should've thrown RuntimeException.");
}
catch (RuntimeException ignore) {}
}
/**
* Test getCharArray().
*/
@Test
public void testGetCharArray()
throws IOException
{
char[] achExpect = {'\u0000', '\u1010', '\u2020', '\u3030'};
PofValue pv = PofValueParser.parse(serialize(achExpect, MODE_PLAIN), getPofContext());
char[] achActual = pv.getCharArray();
assertEquals(achActual.length, achExpect.length);
for (int i = 0, c = achExpect.length; i < c; i++)
{
assertTrue(achActual[i] == achExpect[i]);
}
// requesting an incompatible representation must fail
try
{
pv.getShortArray();
fail("Should've thrown RuntimeException.");
}
catch (RuntimeException ignore) {}
}
/**
* Test getShortArray().
*/
@Test
public void testGetShortArray()
throws IOException
{
short[] anExpect = {0x0000, 0x0001, 0x0002, 0x0003, 0x0004};
PofValue pv = PofValueParser.parse(serialize(anExpect, MODE_PLAIN), getPofContext());
short[] anActual = pv.getShortArray();
assertEquals(anActual.length, anExpect.length);
for (int i = 0, c = anExpect.length; i < c; i++)
{
assertTrue(anActual[i] == anExpect[i]);
}
// requesting an incompatible representation must fail
try
{
pv.getIntArray();
fail("Should've thrown RuntimeException.");
}
catch (RuntimeException ignore) {}
}
/**
* Test getIntArray().
*/
@Test
public void testGetIntArray()
throws IOException
{
int[] anExpect = {0x0000, 0x0001, 0x0002, 0x0003, 0x0004};
PofValue pv = PofValueParser.parse(serialize(anExpect, MODE_PLAIN), getPofContext());
int[] anActual = pv.getIntArray();
assertEquals(anActual.length, anExpect.length);
for (int i = 0, c = anExpect.length; i < c; i++)
{
assertTrue(anActual[i] == anExpect[i]);
}
// requesting an incompatible representation must fail
try
{
pv.getLongArray();
fail("Should've thrown RuntimeException.");
}
catch (RuntimeException ignore) {}
}
/**
* Test getLongArray().
*/
@Test
public void testGetLongArray()
throws IOException
{
long[] alExpect = {0L, 1L, 2L, 3L, 4L, 5L};
PofValue pv = PofValueParser.parse(serialize(alExpect, MODE_PLAIN), getPofContext());
long[] alActual = pv.getLongArray();
assertEquals(alActual.length, alExpect.length);
for (int i = 0, c = alExpect.length; i < c; i++)
{
assertTrue(alActual[i] == alExpect[i]);
}
// requesting an incompatible representation must fail
try
{
pv.getIntArray();
fail("Should've thrown RuntimeException.");
}
catch (RuntimeException ignore) {}
}
/**
* Test getFloatArray().
*/
@Test
public void testGetFloatArray()
throws IOException
{
float[] aflExpect = {0.0f, 0.1f, 0.2f, 0.3f, 0.4f, 0.5f};
PofValue pv = PofValueParser.parse(serialize(aflExpect, MODE_PLAIN), getPofContext());
float[] aflActual = pv.getFloatArray();
assertEquals(aflActual.length, aflExpect.length);
for (int i = 0, c = aflExpect.length; i < c; i++)
{
assertTrue(aflActual[i] == aflExpect[i]);
}
// requesting an incompatible representation must fail
try
{
pv.getIntArray();
fail("Should've thrown RuntimeException.");
}
catch (RuntimeException ignore) {}
}
/**
* Test getDoubleArray().
*/
@Test
public void testGetDoubleArray()
throws IOException
{
double[] adflExpect = {0.0d, 0.1d, 0.2d, 0.3d, 0.4d, 0.5d};
PofValue pv = PofValueParser.parse(serialize(adflExpect, MODE_PLAIN), getPofContext());
double[] adflActual = pv.getDoubleArray();
assertEquals(adflActual.length, adflExpect.length);
for (int i = 0, c = adflExpect.length; i < c; i++)
{
assertTrue(adflActual[i] == adflExpect[i]);
}
// requesting an incompatible representation must fail
try
{
pv.getIntArray();
fail("Should've thrown RuntimeException.");
}
catch (RuntimeException ignore) {}
}
/**
* Test getBigInteger().
*/
@Test
public void testGetBigInteger()
throws IOException
{
BigInteger nExpect = new BigInteger("12345678901234567890");
PofValue pv = PofValueParser.parse(serialize(nExpect, MODE_PLAIN), getPofContext());
assertEquals(pv.getBigInteger(), nExpect);
// requesting an incompatible representation must fail
try
{
pv.getString();
fail("Should've thrown RuntimeException.");
}
catch (RuntimeException ignore) {}
}
/**
* Test getBigDecimal().
*/
@Test
public void testGetBigDecimal()
throws IOException
{
BigDecimal decExpect = new BigDecimal("1234567890.0987654321");
PofValue pv = PofValueParser.parse(serialize(decExpect, MODE_PLAIN), getPofContext());
assertEquals(pv.getBigDecimal(), decExpect);
// requesting an incompatible representation must fail
try
{
pv.getString();
fail("Should've thrown RuntimeException.");
}
catch (RuntimeException ignore) {}
}
/**
 * Test getString().
 */
@Test
public void testGetString()
        throws IOException {
    String sOrig = "qwerty";
    Binary binValue = serialize(sOrig, MODE_PLAIN);
    PofValue parsed = PofValueParser.parse(binValue, getPofContext());

    assertEquals(parsed.getString(), sOrig);

    // a type-mismatched accessor must be rejected
    try {
        parsed.getInt();
        fail("Should've thrown RuntimeException.");
    } catch (RuntimeException ignore) {
    }
}
/**
 * Test getDate().
 */
@Test
public void testGetDate()
        throws IOException {
    Date dateOrig = new Date();
    Binary binValue = serialize(dateOrig, MODE_PLAIN);
    PofValue parsed = PofValueParser.parse(binValue, getPofContext());

    assertEquals(dateOrig, parsed.getDate());

    // a type-mismatched accessor must be rejected
    try {
        parsed.getInt();
        fail("Should've thrown RuntimeException.");
    } catch (RuntimeException ignore) {
    }
}
/**
 * Test getObjectArray().
 */
@Test
public void testGetObjectArray()
        throws IOException {
    Object[] aoOrig = new Object[] {"1", Integer.valueOf(1), Character.valueOf('a')};

    // exercise both POF configurations: references disabled, then enabled
    for (boolean fRef : new boolean[] {false, true}) {
        Binary binValue = serialize(aoOrig, MODE_PLAIN, fRef);
        PofValue parsed = PofValueParser.parse(binValue, getPofContext(fRef));

        assertArrayEquals(parsed.getObjectArray(), aoOrig);

        // a type-mismatched accessor must be rejected
        try {
            parsed.getInt();
            fail("Should've thrown RuntimeException.");
        } catch (RuntimeException ignore) {
        }
    }
}
/**
 * Test getCollection().
 */
@Test
public void testGetCollection()
        throws IOException {
    Collection colOrig = new ArrayList();
    Collections.addAll(colOrig, "1", Integer.valueOf(1), Character.valueOf('a'));

    // exercise both POF configurations: references disabled, then enabled
    for (boolean fRef : new boolean[] {false, true}) {
        Binary binValue = serialize(colOrig, MODE_PLAIN, fRef);
        PofValue parsed = PofValueParser.parse(binValue, getPofContext(fRef));

        assertEquals(parsed.getCollection(null), colOrig);

        // a type-mismatched accessor must be rejected
        try {
            parsed.getInt();
            fail("Should've thrown RuntimeException.");
        } catch (RuntimeException ignore) {
        }

        // when a target collection is supplied, elements are appended to it
        Collection colTarget = new ArrayList();
        colTarget.add("Append");
        assertSame(colTarget, parsed.getCollection(colTarget));
        assertTrue(colTarget.size() == 4);
        assertTrue(colTarget.contains("Append"));
    }
}
/**
 * Test getMap().
 */
@Test
public void testGetMap()
        throws IOException {
    Map mapOrig = new HashMap();
    mapOrig.put("jedan", "1");
    mapOrig.put("dva", Integer.valueOf(2));
    mapOrig.put("true", Boolean.TRUE);

    // exercise both POF configurations: references disabled, then enabled
    for (boolean fRef : new boolean[] {false, true}) {
        Binary binValue = serialize(mapOrig, MODE_PLAIN, fRef);
        PofValue parsed = PofValueParser.parse(binValue, getPofContext(fRef));

        assertEquals(parsed.getMap(null), mapOrig);

        // a type-mismatched accessor must be rejected
        try {
            parsed.getInt();
            fail("Should've thrown RuntimeException.");
        } catch (RuntimeException ignore) {
        }

        // when a target map is supplied, entries are merged into it
        Map mapTarget = new HashMap();
        mapTarget.put("Append", "Value");
        assertSame(mapTarget, parsed.getMap(mapTarget));
        assertTrue(mapTarget.size() == 4);
        assertTrue(mapTarget.containsKey("Append"));
    }
}
/**
 * Test getBoolean().
 */
@Test
public void testGetBoolean()
        throws IOException {
    ObjectWithAllTypes objAll = new ObjectWithAllTypes();
    objAll.init();

    PofContext ctx = getPofContext();
    Binary binValue = serialize(objAll, MODE_FMT_EXT);
    PofValue root = PofValueParser.parse(binValue, ctx);

    // navigate to the first two (boolean) properties of the object
    PofValue valFirst = new SimplePofPath(0).navigate(root);
    PofValue valSecond = new SimplePofPath(1).navigate(root);

    assertEquals(false, valFirst.getValue(Boolean.class));
    assertEquals(true, valSecond.getValue());
}
/**
 * Test object with nested references().
 *
 * Builds a person graph with circular sibling/spouse links and children
 * shared between two parents, then verifies that POF identity references
 * (enabled via getPofContext(true)) preserve object identity across
 * serialization, both for a map root and for a single-object root.
 */
@Test
public void testReferencesWithComplexObject()
throws IOException
{
// NOTE(review): deprecated Date(int, int, int) ctor used deliberately
// for fixed birth dates.
PortablePersonReference ivan = new PortablePersonReference("Ivan", new Date(78, 4, 25));
PortablePersonReference goran = new PortablePersonReference("Goran", new Date(82, 3, 3));
PortablePersonReference anna = new PortablePersonReference("Anna", new Date(80, 4, 12));
PortablePerson tom = new PortablePerson("Tom", new Date(103, 7, 5));
PortablePerson ellen = new PortablePerson("Ellen", new Date(105, 3, 15));
// tom and ellen are shared between goran and anna
ivan.setChildren(null);
goran.setChildren(new PortablePerson[2]);
goran.getChildren()[0] = tom;
goran.getChildren()[1] = ellen;
anna.setChildren(new PortablePerson[2]);
anna.getChildren()[0] = tom;
anna.getChildren()[1] = ellen;
// circular references: ivan <-> goran siblings, goran <-> anna spouses
ivan.setSiblings(new PortablePersonReference[1]);
ivan.getSiblings()[0] = goran;
goran.setSiblings(new PortablePersonReference[1]);
goran.getSiblings()[0] = ivan;
goran.setSpouse(anna);
anna.setSpouse(goran);
Map<CompositeKey, PortableObject> mapPerson = new HashMap<CompositeKey, PortableObject>();
String lastName = "Smith";
CompositeKey key1 = new CompositeKey(lastName, "ivan"),
key2 = new CompositeKey(lastName, "goran");
mapPerson.put(key1, ivan);
mapPerson.put(key2, goran);
// serialize the whole map with references enabled; identity must survive
Binary bin = serialize(mapPerson, MODE_PLAIN, true);
PofValue pv = PofValueParser.parse(bin, getPofContext(true));
Map mapResult = pv.getMap(null);
assertEquals(2, mapResult.size());
PortablePersonReference ivanR = (PortablePersonReference) mapResult.get(key1);
PortablePersonReference goranR = (PortablePersonReference) mapResult.get(key2);
assertEquals(goran.m_sName, goranR.m_sName);
// azzert checks reference identity (==), not just equality
azzert(ivanR.getSiblings()[0] == goranR);
azzert(goranR.getSpouse().getChildren()[0] == goranR.getChildren()[0]);
// same identity checks when a single object is the serialization root,
// read back both via getRoot().getValue() and getValue(T_UNKNOWN)
bin = serialize(ivan, MODE_PLAIN, true);
pv = PofValueParser.parse(bin, getPofContext(true));
ivanR = (PortablePersonReference) pv.getRoot().getValue();
goranR = ivanR.getSiblings()[0];
azzert(goranR.getSiblings()[0] == ivanR);
ivanR = (PortablePersonReference) pv.getValue(PofConstants.T_UNKNOWN);
goranR = ivanR.getSiblings()[0];
azzert(goranR.getSiblings()[0] == ivanR);
}
}
|
googleapis/google-cloud-java | 38,248 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/DeleteHealthCheckRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for HealthChecks.Delete. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.DeleteHealthCheckRequest}
*/
public final class DeleteHealthCheckRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.DeleteHealthCheckRequest)
DeleteHealthCheckRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteHealthCheckRequest.newBuilder() to construct.
private DeleteHealthCheckRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Initializes string fields to their proto3 default (empty string).
private DeleteHealthCheckRequest() {
healthCheck_ = "";
project_ = "";
requestId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
// Invoked reflectively by the protobuf runtime to create fresh instances
// without going through a public constructor.
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DeleteHealthCheckRequest();
}
// Returns the protobuf descriptor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_DeleteHealthCheckRequest_descriptor;
}
@java.lang.Override
// Binds the generated field accessors to this message and its Builder.
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_DeleteHealthCheckRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.DeleteHealthCheckRequest.class,
com.google.cloud.compute.v1.DeleteHealthCheckRequest.Builder.class);
}
private int bitField0_;
public static final int HEALTH_CHECK_FIELD_NUMBER = 308876645;
@SuppressWarnings("serial")
private volatile java.lang.Object healthCheck_ = "";
/**
*
*
* <pre>
* Name of the HealthCheck resource to delete.
* </pre>
*
* <code>string health_check = 308876645 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The healthCheck.
*/
@java.lang.Override
public java.lang.String getHealthCheck() {
java.lang.Object ref = healthCheck_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Field is still raw ByteString from parsing; decode once, cache the
// String back into the field, and return it.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
healthCheck_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name of the HealthCheck resource to delete.
* </pre>
*
* <code>string health_check = 308876645 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for healthCheck.
*/
@java.lang.Override
public com.google.protobuf.ByteString getHealthCheckBytes() {
java.lang.Object ref = healthCheck_;
if (ref instanceof java.lang.String) {
// Cached as String; encode once to UTF-8 bytes and cache the ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
healthCheck_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PROJECT_FIELD_NUMBER = 227560217;
@SuppressWarnings("serial")
private volatile java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return The project.
*/
@java.lang.Override
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Field is still raw ByteString from parsing; decode once and cache.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return The bytes for project.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
// Cached as String; encode once to UTF-8 bytes and cache the ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REQUEST_ID_FIELD_NUMBER = 37109963;
@SuppressWarnings("serial")
private volatile java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return Whether the requestId field is set.
*/
@java.lang.Override
public boolean hasRequestId() {
// Bit 0 of bitField0_ tracks explicit presence of optional request_id.
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return The requestId.
*/
@java.lang.Override
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Field is still raw ByteString from parsing; decode once and cache.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
}
}
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return The bytes for requestId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
// Cached as String; encode once to UTF-8 bytes and cache the ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
// Memoized result: -1 = not yet computed, 1 = initialized, 0 = not.
// This message has no required fields, so it is always initialized.
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
// Serializes set fields in ascending field-number order
// (37109963, 227560217, 308876645), then any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 37109963, requestId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(healthCheck_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 308876645, healthCheck_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
// Computes the wire size once and memoizes it in memoizedSize
// (-1 means "not yet computed"); safe because the message is immutable.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(37109963, requestId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(healthCheck_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(308876645, healthCheck_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
// Field-wise equality; optional request_id compares presence first, and
// unknown fields must match too.
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.DeleteHealthCheckRequest)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.DeleteHealthCheckRequest other =
(com.google.cloud.compute.v1.DeleteHealthCheckRequest) obj;
if (!getHealthCheck().equals(other.getHealthCheck())) return false;
if (!getProject().equals(other.getProject())) return false;
if (hasRequestId() != other.hasRequestId()) return false;
if (hasRequestId()) {
if (!getRequestId().equals(other.getRequestId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
// Hash mixes descriptor, each set field (keyed by field number), and
// unknown fields; memoized since the message is immutable.
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + HEALTH_CHECK_FIELD_NUMBER;
hash = (53 * hash) + getHealthCheck().hashCode();
hash = (37 * hash) + PROJECT_FIELD_NUMBER;
hash = (53 * hash) + getProject().hashCode();
if (hasRequestId()) {
hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
hash = (53 * hash) + getRequestId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points; all delegate to PARSER (or to
// the GeneratedMessageV3 stream helpers, which wrap protocol errors from
// streams as IOException).
public static com.google.cloud.compute.v1.DeleteHealthCheckRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.DeleteHealthCheckRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.DeleteHealthCheckRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.DeleteHealthCheckRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.DeleteHealthCheckRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.DeleteHealthCheckRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.DeleteHealthCheckRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.DeleteHealthCheckRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static com.google.cloud.compute.v1.DeleteHealthCheckRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.DeleteHealthCheckRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.DeleteHealthCheckRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.DeleteHealthCheckRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Fresh builder seeded from the default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated from an existing message.
public static Builder newBuilder(com.google.cloud.compute.v1.DeleteHealthCheckRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Avoid a redundant mergeFrom when this is already the default instance.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A request message for HealthChecks.Delete. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.DeleteHealthCheckRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.DeleteHealthCheckRequest)
com.google.cloud.compute.v1.DeleteHealthCheckRequestOrBuilder {
// Descriptor/accessor plumbing mirrors the enclosing message's.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_DeleteHealthCheckRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_DeleteHealthCheckRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.DeleteHealthCheckRequest.class,
com.google.cloud.compute.v1.DeleteHealthCheckRequest.Builder.class);
}
// Construct using com.google.cloud.compute.v1.DeleteHealthCheckRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
// Resets all fields to proto3 defaults and clears all presence bits.
public Builder clear() {
super.clear();
bitField0_ = 0;
healthCheck_ = "";
project_ = "";
requestId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_DeleteHealthCheckRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.DeleteHealthCheckRequest getDefaultInstanceForType() {
return com.google.cloud.compute.v1.DeleteHealthCheckRequest.getDefaultInstance();
}
@java.lang.Override
// build() enforces initialization; buildPartial() below does not.
public com.google.cloud.compute.v1.DeleteHealthCheckRequest build() {
com.google.cloud.compute.v1.DeleteHealthCheckRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.DeleteHealthCheckRequest buildPartial() {
com.google.cloud.compute.v1.DeleteHealthCheckRequest result =
new com.google.cloud.compute.v1.DeleteHealthCheckRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies set fields into the message. Builder bits: 0x1 = healthCheck,
// 0x2 = project, 0x4 = requestId; only requestId (optional) carries a
// presence bit (0x1) into the built message's bitField0_.
private void buildPartial0(com.google.cloud.compute.v1.DeleteHealthCheckRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.healthCheck_ = healthCheck_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.project_ = project_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.requestId_ = requestId_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
// Explicit overrides of the generic Builder operations; all delegate to
// the GeneratedMessageV3.Builder superclass unchanged.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
// Dispatches to the strongly-typed mergeFrom when possible.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.DeleteHealthCheckRequest) {
return mergeFrom((com.google.cloud.compute.v1.DeleteHealthCheckRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges set (non-default) fields from another message into this builder;
// standard proto3 merge semantics (later values overwrite).
public Builder mergeFrom(com.google.cloud.compute.v1.DeleteHealthCheckRequest other) {
if (other == com.google.cloud.compute.v1.DeleteHealthCheckRequest.getDefaultInstance())
return this;
if (!other.getHealthCheck().isEmpty()) {
healthCheck_ = other.healthCheck_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getProject().isEmpty()) {
project_ = other.project_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasRequestId()) {
requestId_ = other.requestId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// No required fields, so the builder is always initialized.
return true;
}
@java.lang.Override
// Wire-format parser. Each case value is the full tag:
// (field_number << 3) | wire_type, with wire type 2 = length-delimited.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 296879706:
{
// 37109963 << 3 | 2 : request_id
requestId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 296879706
case 1820481738:
{
// 227560217 << 3 | 2 : project
project_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 1820481738
case -1823954134:
{
// 308876645 << 3 | 2 : health_check (tag overflows to negative int)
healthCheck_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case -1823954134
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object healthCheck_ = "";
/**
*
*
* <pre>
* Name of the HealthCheck resource to delete.
* </pre>
*
* <code>string health_check = 308876645 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The healthCheck.
*/
public java.lang.String getHealthCheck() {
java.lang.Object ref = healthCheck_;
if (!(ref instanceof java.lang.String)) {
// Raw bytes from parsing; decode once and cache the String form.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
healthCheck_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name of the HealthCheck resource to delete.
* </pre>
*
* <code>string health_check = 308876645 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for healthCheck.
*/
public com.google.protobuf.ByteString getHealthCheckBytes() {
java.lang.Object ref = healthCheck_;
if (ref instanceof String) {
// Cached as String; encode once and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
healthCheck_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name of the HealthCheck resource to delete.
* </pre>
*
* <code>string health_check = 308876645 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The healthCheck to set.
* @return This builder for chaining.
*/
public Builder setHealthCheck(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
healthCheck_ = value;
// Builder bit 0x1 marks healthCheck as set for buildPartial0.
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the HealthCheck resource to delete.
* </pre>
*
* <code>string health_check = 308876645 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearHealthCheck() {
// Reset to the default ("") and drop the set bit.
healthCheck_ = getDefaultInstance().getHealthCheck();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the HealthCheck resource to delete.
* </pre>
*
* <code>string health_check = 308876645 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for healthCheck to set.
* @return This builder for chaining.
*/
public Builder setHealthCheckBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// proto3 strings must be valid UTF-8; reject invalid bytes up front.
checkByteStringIsUtf8(value);
healthCheck_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return The project.
*/
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (!(ref instanceof java.lang.String)) {
// Raw bytes from parsing; decode once and cache the String form.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return The bytes for project.
*/
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof String) {
// Cached as String; encode once and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @param value The project to set.
* @return This builder for chaining.
*/
public Builder setProject(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
project_ = value;
// Builder bit 0x2 marks project as set for buildPartial0.
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearProject() {
// Reset to the default ("") and drop the set bit.
project_ = getDefaultInstance().getProject();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @param value The bytes for project to set.
* @return This builder for chaining.
*/
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// proto3 strings must be valid UTF-8; reject invalid bytes up front.
checkByteStringIsUtf8(value);
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return Whether the requestId field is set.
*/
    // True when request_id was explicitly set (presence bit 0x4); the field is
    // declared `optional`, so presence is tracked separately from its value.
    public boolean hasRequestId() {
      return ((bitField0_ & 0x00000004) != 0);
    }
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return The requestId.
*/
    // Returns request_id as a String. If the stored value is still the raw
    // ByteString from the wire, decode it once and cache the String back into
    // requestId_ so subsequent calls are allocation-free.
    public java.lang.String getRequestId() {
      java.lang.Object ref = requestId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        requestId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return The bytes for requestId.
*/
    // Returns request_id as a ByteString; the mirror image of getRequestId():
    // if a String is cached, encode it to UTF-8 once and cache the ByteString.
    public com.google.protobuf.ByteString getRequestIdBytes() {
      java.lang.Object ref = requestId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        requestId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
    // Sets the optional request_id field and records its presence (bit 0x4).
    public Builder setRequestId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      requestId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return This builder for chaining.
*/
    // Resets request_id to the message default and clears its presence bit (0x4),
    // so hasRequestId() returns false again.
    public Builder clearRequestId() {
      requestId_ = getDefaultInstance().getRequestId();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
    // Byte-oriented variant of setRequestId; enforces well-formed UTF-8 input.
    public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      requestId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    // Standard pass-throughs to GeneratedMessageV3.Builder: unknown fields read
    // off the wire are preserved verbatim for forward compatibility.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.DeleteHealthCheckRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.DeleteHealthCheckRequest)
  // Singleton default instance: every unset message field resolves to this.
  private static final com.google.cloud.compute.v1.DeleteHealthCheckRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.DeleteHealthCheckRequest();
  }

  public static com.google.cloud.compute.v1.DeleteHealthCheckRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire parser. On any failure it attaches the partially-built message via
  // setUnfinishedMessage() so callers can inspect what was decoded before the error.
  private static final com.google.protobuf.Parser<DeleteHealthCheckRequest> PARSER =
      new com.google.protobuf.AbstractParser<DeleteHealthCheckRequest>() {
        @java.lang.Override
        public DeleteHealthCheckRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // I/O errors are wrapped so the caller always sees a protobuf exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<DeleteHealthCheckRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DeleteHealthCheckRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.DeleteHealthCheckRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 38,402 | java-secretmanager/google-cloud-secretmanager/src/main/java/com/google/cloud/secretmanager/v1/stub/SecretManagerServiceStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.secretmanager.v1.stub;
import static com.google.cloud.secretmanager.v1.SecretManagerServiceClient.ListSecretVersionsPagedResponse;
import static com.google.cloud.secretmanager.v1.SecretManagerServiceClient.ListSecretsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.secretmanager.v1.AccessSecretVersionRequest;
import com.google.cloud.secretmanager.v1.AccessSecretVersionResponse;
import com.google.cloud.secretmanager.v1.AddSecretVersionRequest;
import com.google.cloud.secretmanager.v1.CreateSecretRequest;
import com.google.cloud.secretmanager.v1.DeleteSecretRequest;
import com.google.cloud.secretmanager.v1.DestroySecretVersionRequest;
import com.google.cloud.secretmanager.v1.DisableSecretVersionRequest;
import com.google.cloud.secretmanager.v1.EnableSecretVersionRequest;
import com.google.cloud.secretmanager.v1.GetSecretRequest;
import com.google.cloud.secretmanager.v1.GetSecretVersionRequest;
import com.google.cloud.secretmanager.v1.ListSecretVersionsRequest;
import com.google.cloud.secretmanager.v1.ListSecretVersionsResponse;
import com.google.cloud.secretmanager.v1.ListSecretsRequest;
import com.google.cloud.secretmanager.v1.ListSecretsResponse;
import com.google.cloud.secretmanager.v1.Secret;
import com.google.cloud.secretmanager.v1.SecretVersion;
import com.google.cloud.secretmanager.v1.UpdateSecretRequest;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link SecretManagerServiceStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (secretmanager.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of createSecret:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* SecretManagerServiceStubSettings.Builder secretManagerServiceSettingsBuilder =
* SecretManagerServiceStubSettings.newBuilder();
* secretManagerServiceSettingsBuilder
* .createSecretSettings()
* .setRetrySettings(
* secretManagerServiceSettingsBuilder
* .createSecretSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* SecretManagerServiceStubSettings secretManagerServiceSettings =
* secretManagerServiceSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*/
@Generated("by gapic-generator-java")
public class SecretManagerServiceStubSettings
extends StubSettings<SecretManagerServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  // Immutable per-RPC call settings, one field per service method; populated once
  // from the Builder in the constructor below and exposed via the accessors.
  private final PagedCallSettings<ListSecretsRequest, ListSecretsResponse, ListSecretsPagedResponse>
      listSecretsSettings;
  private final UnaryCallSettings<CreateSecretRequest, Secret> createSecretSettings;
  private final UnaryCallSettings<AddSecretVersionRequest, SecretVersion> addSecretVersionSettings;
  private final UnaryCallSettings<GetSecretRequest, Secret> getSecretSettings;
  private final UnaryCallSettings<UpdateSecretRequest, Secret> updateSecretSettings;
  private final UnaryCallSettings<DeleteSecretRequest, Empty> deleteSecretSettings;
  private final PagedCallSettings<
          ListSecretVersionsRequest, ListSecretVersionsResponse, ListSecretVersionsPagedResponse>
      listSecretVersionsSettings;
  private final UnaryCallSettings<GetSecretVersionRequest, SecretVersion> getSecretVersionSettings;
  private final UnaryCallSettings<AccessSecretVersionRequest, AccessSecretVersionResponse>
      accessSecretVersionSettings;
  private final UnaryCallSettings<DisableSecretVersionRequest, SecretVersion>
      disableSecretVersionSettings;
  private final UnaryCallSettings<EnableSecretVersionRequest, SecretVersion>
      enableSecretVersionSettings;
  private final UnaryCallSettings<DestroySecretVersionRequest, SecretVersion>
      destroySecretVersionSettings;
  private final UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings;
  private final UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings;
  private final UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsSettings;
  // Pagination adapter for ListSecrets: tells the GAX paging machinery how to
  // inject page tokens/sizes into requests and pull tokens/resources out of responses.
  private static final PagedListDescriptor<ListSecretsRequest, ListSecretsResponse, Secret>
      LIST_SECRETS_PAGE_STR_DESC =
          new PagedListDescriptor<ListSecretsRequest, ListSecretsResponse, Secret>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListSecretsRequest injectToken(ListSecretsRequest payload, String token) {
              return ListSecretsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListSecretsRequest injectPageSize(ListSecretsRequest payload, int pageSize) {
              return ListSecretsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListSecretsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListSecretsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Secret> extractResources(ListSecretsResponse payload) {
              return payload.getSecretsList();
            }
          };
  // Pagination adapter for ListSecretVersions; same shape as the ListSecrets
  // descriptor above but over SecretVersion resources.
  private static final PagedListDescriptor<
          ListSecretVersionsRequest, ListSecretVersionsResponse, SecretVersion>
      LIST_SECRET_VERSIONS_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListSecretVersionsRequest, ListSecretVersionsResponse, SecretVersion>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListSecretVersionsRequest injectToken(
                ListSecretVersionsRequest payload, String token) {
              return ListSecretVersionsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListSecretVersionsRequest injectPageSize(
                ListSecretVersionsRequest payload, int pageSize) {
              return ListSecretVersionsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListSecretVersionsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListSecretVersionsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<SecretVersion> extractResources(ListSecretVersionsResponse payload) {
              return payload.getVersionsList();
            }
          };
  // Factories that wrap a raw future response into the client-facing paged
  // response types, using the descriptors defined above.
  private static final PagedListResponseFactory<
          ListSecretsRequest, ListSecretsResponse, ListSecretsPagedResponse>
      LIST_SECRETS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListSecretsRequest, ListSecretsResponse, ListSecretsPagedResponse>() {
            @Override
            public ApiFuture<ListSecretsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListSecretsRequest, ListSecretsResponse> callable,
                ListSecretsRequest request,
                ApiCallContext context,
                ApiFuture<ListSecretsResponse> futureResponse) {
              PageContext<ListSecretsRequest, ListSecretsResponse, Secret> pageContext =
                  PageContext.create(callable, LIST_SECRETS_PAGE_STR_DESC, request, context);
              return ListSecretsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  private static final PagedListResponseFactory<
          ListSecretVersionsRequest, ListSecretVersionsResponse, ListSecretVersionsPagedResponse>
      LIST_SECRET_VERSIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListSecretVersionsRequest,
              ListSecretVersionsResponse,
              ListSecretVersionsPagedResponse>() {
            @Override
            public ApiFuture<ListSecretVersionsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListSecretVersionsRequest, ListSecretVersionsResponse> callable,
                ListSecretVersionsRequest request,
                ApiCallContext context,
                ApiFuture<ListSecretVersionsResponse> futureResponse) {
              PageContext<ListSecretVersionsRequest, ListSecretVersionsResponse, SecretVersion>
                  pageContext =
                      PageContext.create(
                          callable, LIST_SECRET_VERSIONS_PAGE_STR_DESC, request, context);
              return ListSecretVersionsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // --- Per-RPC call settings accessors (one per service method) ---

  /** Returns the object with the settings used for calls to listSecrets. */
  public PagedCallSettings<ListSecretsRequest, ListSecretsResponse, ListSecretsPagedResponse>
      listSecretsSettings() {
    return listSecretsSettings;
  }

  /** Returns the object with the settings used for calls to createSecret. */
  public UnaryCallSettings<CreateSecretRequest, Secret> createSecretSettings() {
    return createSecretSettings;
  }

  /** Returns the object with the settings used for calls to addSecretVersion. */
  public UnaryCallSettings<AddSecretVersionRequest, SecretVersion> addSecretVersionSettings() {
    return addSecretVersionSettings;
  }

  /** Returns the object with the settings used for calls to getSecret. */
  public UnaryCallSettings<GetSecretRequest, Secret> getSecretSettings() {
    return getSecretSettings;
  }

  /** Returns the object with the settings used for calls to updateSecret. */
  public UnaryCallSettings<UpdateSecretRequest, Secret> updateSecretSettings() {
    return updateSecretSettings;
  }

  /** Returns the object with the settings used for calls to deleteSecret. */
  public UnaryCallSettings<DeleteSecretRequest, Empty> deleteSecretSettings() {
    return deleteSecretSettings;
  }

  /** Returns the object with the settings used for calls to listSecretVersions. */
  public PagedCallSettings<
          ListSecretVersionsRequest, ListSecretVersionsResponse, ListSecretVersionsPagedResponse>
      listSecretVersionsSettings() {
    return listSecretVersionsSettings;
  }

  /** Returns the object with the settings used for calls to getSecretVersion. */
  public UnaryCallSettings<GetSecretVersionRequest, SecretVersion> getSecretVersionSettings() {
    return getSecretVersionSettings;
  }

  /** Returns the object with the settings used for calls to accessSecretVersion. */
  public UnaryCallSettings<AccessSecretVersionRequest, AccessSecretVersionResponse>
      accessSecretVersionSettings() {
    return accessSecretVersionSettings;
  }

  /** Returns the object with the settings used for calls to disableSecretVersion. */
  public UnaryCallSettings<DisableSecretVersionRequest, SecretVersion>
      disableSecretVersionSettings() {
    return disableSecretVersionSettings;
  }

  /** Returns the object with the settings used for calls to enableSecretVersion. */
  public UnaryCallSettings<EnableSecretVersionRequest, SecretVersion>
      enableSecretVersionSettings() {
    return enableSecretVersionSettings;
  }

  /** Returns the object with the settings used for calls to destroySecretVersion. */
  public UnaryCallSettings<DestroySecretVersionRequest, SecretVersion>
      destroySecretVersionSettings() {
    return destroySecretVersionSettings;
  }

  /** Returns the object with the settings used for calls to setIamPolicy. */
  public UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings() {
    return setIamPolicySettings;
  }

  /** Returns the object with the settings used for calls to getIamPolicy. */
  public UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings() {
    return getIamPolicySettings;
  }

  /** Returns the object with the settings used for calls to testIamPermissions. */
  public UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsSettings() {
    return testIamPermissionsSettings;
  }
  /**
   * Creates the concrete transport stub for these settings, dispatching on the
   * configured transport channel's name: gRPC or HTTP/JSON (REST).
   *
   * @throws IOException if stub creation fails
   * @throws UnsupportedOperationException if the configured transport is neither gRPC nor HTTP/JSON
   */
  public SecretManagerServiceStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcSecretManagerServiceStub.create(this);
    }
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonSecretManagerServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }
  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    // Short service identifier used by gax (e.g. for endpoint/universe-domain resolution).
    return "secretmanager";
  }
  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "secretmanager.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "secretmanager.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    // Unbounded inbound message size: secret payloads can be large.
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  /** Returns the default transport channel provider; gRPC is the default transport. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }
  /** Builds the x-goog-api-client header tokens (gapic + gRPC transport versions). */
  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(SecretManagerServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Builds the x-goog-api-client header tokens for the HTTP/JSON (REST) transport. */
  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(SecretManagerServiceStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  /** Defaults to the gRPC header provider, matching the default transport. */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return SecretManagerServiceStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }

  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  /**
   * Freezes a Builder into an immutable settings instance: each per-RPC builder
   * is built exactly once here and stored in the corresponding final field.
   */
  protected SecretManagerServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    listSecretsSettings = settingsBuilder.listSecretsSettings().build();
    createSecretSettings = settingsBuilder.createSecretSettings().build();
    addSecretVersionSettings = settingsBuilder.addSecretVersionSettings().build();
    getSecretSettings = settingsBuilder.getSecretSettings().build();
    updateSecretSettings = settingsBuilder.updateSecretSettings().build();
    deleteSecretSettings = settingsBuilder.deleteSecretSettings().build();
    listSecretVersionsSettings = settingsBuilder.listSecretVersionsSettings().build();
    getSecretVersionSettings = settingsBuilder.getSecretVersionSettings().build();
    accessSecretVersionSettings = settingsBuilder.accessSecretVersionSettings().build();
    disableSecretVersionSettings = settingsBuilder.disableSecretVersionSettings().build();
    enableSecretVersionSettings = settingsBuilder.enableSecretVersionSettings().build();
    destroySecretVersionSettings = settingsBuilder.destroySecretVersionSettings().build();
    setIamPolicySettings = settingsBuilder.setIamPolicySettings().build();
    getIamPolicySettings = settingsBuilder.getIamPolicySettings().build();
    testIamPermissionsSettings = settingsBuilder.testIamPermissionsSettings().build();
  }
/** Builder for SecretManagerServiceStubSettings. */
public static class Builder
extends StubSettings.Builder<SecretManagerServiceStubSettings, Builder> {
    // Mutable per-RPC settings builders, mirroring the immutable fields of the
    // outer class; unaryMethodSettingsBuilders aggregates them so defaults/retries
    // can be applied to all methods in one pass.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final PagedCallSettings.Builder<
            ListSecretsRequest, ListSecretsResponse, ListSecretsPagedResponse>
        listSecretsSettings;
    private final UnaryCallSettings.Builder<CreateSecretRequest, Secret> createSecretSettings;
    private final UnaryCallSettings.Builder<AddSecretVersionRequest, SecretVersion>
        addSecretVersionSettings;
    private final UnaryCallSettings.Builder<GetSecretRequest, Secret> getSecretSettings;
    private final UnaryCallSettings.Builder<UpdateSecretRequest, Secret> updateSecretSettings;
    private final UnaryCallSettings.Builder<DeleteSecretRequest, Empty> deleteSecretSettings;
    private final PagedCallSettings.Builder<
            ListSecretVersionsRequest, ListSecretVersionsResponse, ListSecretVersionsPagedResponse>
        listSecretVersionsSettings;
    private final UnaryCallSettings.Builder<GetSecretVersionRequest, SecretVersion>
        getSecretVersionSettings;
    private final UnaryCallSettings.Builder<AccessSecretVersionRequest, AccessSecretVersionResponse>
        accessSecretVersionSettings;
    private final UnaryCallSettings.Builder<DisableSecretVersionRequest, SecretVersion>
        disableSecretVersionSettings;
    private final UnaryCallSettings.Builder<EnableSecretVersionRequest, SecretVersion>
        enableSecretVersionSettings;
    private final UnaryCallSettings.Builder<DestroySecretVersionRequest, SecretVersion>
        destroySecretVersionSettings;
    private final UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings;
    private final UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings;
    private final UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsSettings;
    // Named retry policies referenced by initDefaults(): which status codes are
    // retryable under each policy name.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      // "no_retry_0": empty set -> never retried.
      definitions.put(
          "no_retry_0_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      // "retry_policy_1": retried on transient server conditions.
      definitions.put(
          "retry_policy_1_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.UNAVAILABLE, StatusCode.Code.RESOURCE_EXHAUSTED)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Timing parameters for each retry policy: 60s RPC/total timeouts; policy 1
    // additionally backs off from 2s with a 2x multiplier, capped at 60s.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("no_retry_0_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(2000L))
              .setRetryDelayMultiplier(2.0)
              .setMaxRetryDelayDuration(Duration.ofMillis(60000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_1_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }
    protected Builder() {
      this(((ClientContext) null));
    }

    // Fresh builder: creates one settings builder per RPC, registers the unary
    // ones in a single list for bulk configuration, then applies the generated
    // retry/timeout defaults via initDefaults().
    protected Builder(ClientContext clientContext) {
      super(clientContext);

      listSecretsSettings = PagedCallSettings.newBuilder(LIST_SECRETS_PAGE_STR_FACT);
      createSecretSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      addSecretVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getSecretSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateSecretSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteSecretSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listSecretVersionsSettings = PagedCallSettings.newBuilder(LIST_SECRET_VERSIONS_PAGE_STR_FACT);
      getSecretVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      accessSecretVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      disableSecretVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      enableSecretVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      destroySecretVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      setIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      testIamPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listSecretsSettings,
              createSecretSettings,
              addSecretVersionSettings,
              getSecretSettings,
              updateSecretSettings,
              deleteSecretSettings,
              listSecretVersionsSettings,
              getSecretVersionSettings,
              accessSecretVersionSettings,
              disableSecretVersionSettings,
              enableSecretVersionSettings,
              destroySecretVersionSettings,
              setIamPolicySettings,
              getIamPolicySettings,
              testIamPermissionsSettings);
      initDefaults(this);
    }
/**
 * Copy constructor: clones each per-RPC settings builder from an already-built immutable settings
 * object so that the copy can be mutated independently. Note that, unlike the client-context
 * constructor, this does NOT re-run {@code initDefaults} — the source settings are taken as-is.
 */
protected Builder(SecretManagerServiceStubSettings settings) {
  super(settings);

  listSecretsSettings = settings.listSecretsSettings.toBuilder();
  createSecretSettings = settings.createSecretSettings.toBuilder();
  addSecretVersionSettings = settings.addSecretVersionSettings.toBuilder();
  getSecretSettings = settings.getSecretSettings.toBuilder();
  updateSecretSettings = settings.updateSecretSettings.toBuilder();
  deleteSecretSettings = settings.deleteSecretSettings.toBuilder();
  listSecretVersionsSettings = settings.listSecretVersionsSettings.toBuilder();
  getSecretVersionSettings = settings.getSecretVersionSettings.toBuilder();
  accessSecretVersionSettings = settings.accessSecretVersionSettings.toBuilder();
  disableSecretVersionSettings = settings.disableSecretVersionSettings.toBuilder();
  enableSecretVersionSettings = settings.enableSecretVersionSettings.toBuilder();
  destroySecretVersionSettings = settings.destroySecretVersionSettings.toBuilder();
  setIamPolicySettings = settings.setIamPolicySettings.toBuilder();
  getIamPolicySettings = settings.getIamPolicySettings.toBuilder();
  testIamPermissionsSettings = settings.testIamPermissionsSettings.toBuilder();

  unaryMethodSettingsBuilders =
      ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
          listSecretsSettings,
          createSecretSettings,
          addSecretVersionSettings,
          getSecretSettings,
          updateSecretSettings,
          deleteSecretSettings,
          listSecretVersionsSettings,
          getSecretVersionSettings,
          accessSecretVersionSettings,
          disableSecretVersionSettings,
          enableSecretVersionSettings,
          destroySecretVersionSettings,
          setIamPolicySettings,
          getIamPolicySettings,
          testIamPermissionsSettings);
}
/**
 * Creates a builder preconfigured for the default transport with default credentials, client
 * headers and mTLS endpoint handling, then applies the generated per-RPC retry defaults.
 */
private static Builder createDefault() {
  Builder builder = new Builder(((ClientContext) null));

  builder.setTransportChannelProvider(defaultTransportChannelProvider());
  builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
  builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
  builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
  builder.setSwitchToMtlsEndpointAllowed(true);

  return initDefaults(builder);
}
/**
 * Same as {@code createDefault()} but wired for the HTTP/JSON transport (REST) instead of the
 * default transport channel.
 */
private static Builder createHttpJsonDefault() {
  Builder builder = new Builder(((ClientContext) null));

  builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
  builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
  builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
  builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
  builder.setSwitchToMtlsEndpointAllowed(true);

  return initDefaults(builder);
}
/**
 * Applies the generated per-RPC retryable-code and retry-parameter defaults. Every RPC uses the
 * "no_retry_0" code/param sets except accessSecretVersion, which uses "retry_policy_1"
 * (defined in the RETRYABLE_CODE_DEFINITIONS / RETRY_PARAM_DEFINITIONS static maps).
 *
 * @return the same builder, for chaining from createDefault/createHttpJsonDefault
 */
private static Builder initDefaults(Builder builder) {
  builder
      .listSecretsSettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  builder
      .createSecretSettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  builder
      .addSecretVersionSettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  builder
      .getSecretSettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  builder
      .updateSecretSettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  builder
      .deleteSecretSettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  builder
      .listSecretVersionsSettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  builder
      .getSecretVersionSettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  // accessSecretVersion is the only call configured with the "retry_policy_1" definitions.
  builder
      .accessSecretVersionSettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"));

  builder
      .disableSecretVersionSettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  builder
      .enableSecretVersionSettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  builder
      .destroySecretVersionSettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  builder
      .setIamPolicySettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  builder
      .getIamPolicySettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  builder
      .testIamPermissionsSettings()
      .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
      .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));

  return builder;
}
/**
 * Applies the given settings updater function to all of the unary API methods in this service.
 *
 * <p>Note: This method does not support applying settings to streaming methods.
 *
 * @param settingsUpdater function invoked once per registered unary method settings builder
 * @return this builder, for chaining
 */
public Builder applyToAllUnaryMethods(
    ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
  super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
  return this;
}
/** Returns the immutable list of all per-method settings builders registered on this builder. */
public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
  return unaryMethodSettingsBuilders;
}
/** Returns the mutable builder for the settings used for calls to listSecrets. */
public PagedCallSettings.Builder<
        ListSecretsRequest, ListSecretsResponse, ListSecretsPagedResponse>
    listSecretsSettings() {
  return listSecretsSettings;
}
/** Returns the mutable builder for the settings used for calls to createSecret. */
public UnaryCallSettings.Builder<CreateSecretRequest, Secret> createSecretSettings() {
  return createSecretSettings;
}
/** Returns the mutable builder for the settings used for calls to addSecretVersion. */
public UnaryCallSettings.Builder<AddSecretVersionRequest, SecretVersion>
    addSecretVersionSettings() {
  return addSecretVersionSettings;
}
/** Returns the mutable builder for the settings used for calls to getSecret. */
public UnaryCallSettings.Builder<GetSecretRequest, Secret> getSecretSettings() {
  return getSecretSettings;
}
/** Returns the mutable builder for the settings used for calls to updateSecret. */
public UnaryCallSettings.Builder<UpdateSecretRequest, Secret> updateSecretSettings() {
  return updateSecretSettings;
}
/** Returns the mutable builder for the settings used for calls to deleteSecret. */
public UnaryCallSettings.Builder<DeleteSecretRequest, Empty> deleteSecretSettings() {
  return deleteSecretSettings;
}
/** Returns the mutable builder for the settings used for calls to listSecretVersions. */
public PagedCallSettings.Builder<
        ListSecretVersionsRequest, ListSecretVersionsResponse, ListSecretVersionsPagedResponse>
    listSecretVersionsSettings() {
  return listSecretVersionsSettings;
}
/** Returns the mutable builder for the settings used for calls to getSecretVersion. */
public UnaryCallSettings.Builder<GetSecretVersionRequest, SecretVersion>
    getSecretVersionSettings() {
  return getSecretVersionSettings;
}
/** Returns the mutable builder for the settings used for calls to accessSecretVersion. */
public UnaryCallSettings.Builder<AccessSecretVersionRequest, AccessSecretVersionResponse>
    accessSecretVersionSettings() {
  return accessSecretVersionSettings;
}
/** Returns the mutable builder for the settings used for calls to disableSecretVersion. */
public UnaryCallSettings.Builder<DisableSecretVersionRequest, SecretVersion>
    disableSecretVersionSettings() {
  return disableSecretVersionSettings;
}
/** Returns the mutable builder for the settings used for calls to enableSecretVersion. */
public UnaryCallSettings.Builder<EnableSecretVersionRequest, SecretVersion>
    enableSecretVersionSettings() {
  return enableSecretVersionSettings;
}
/** Returns the mutable builder for the settings used for calls to destroySecretVersion. */
public UnaryCallSettings.Builder<DestroySecretVersionRequest, SecretVersion>
    destroySecretVersionSettings() {
  return destroySecretVersionSettings;
}
/** Returns the mutable builder for the settings used for calls to setIamPolicy. */
public UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings() {
  return setIamPolicySettings;
}
/** Returns the mutable builder for the settings used for calls to getIamPolicy. */
public UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings() {
  return getIamPolicySettings;
}
/** Returns the mutable builder for the settings used for calls to testIamPermissions. */
public UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse>
    testIamPermissionsSettings() {
  return testIamPermissionsSettings;
}
/** Builds an immutable {@code SecretManagerServiceStubSettings} snapshot of this builder. */
@Override
public SecretManagerServiceStubSettings build() throws IOException {
  return new SecretManagerServiceStubSettings(this);
}
}
}
|
oracle/graal | 38,162 | truffle/src/com.oracle.truffle.api.bytecode.test/src/com/oracle/truffle/api/bytecode/test/basic_interpreter/LocalsTest.java | /*
* Copyright (c) 2023, 2024, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oracle.truffle.api.bytecode.test.basic_interpreter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.List;
import java.util.function.BiConsumer;
import org.junit.Test;
import com.oracle.truffle.api.bytecode.BytecodeConfig;
import com.oracle.truffle.api.bytecode.BytecodeLocal;
import com.oracle.truffle.api.bytecode.BytecodeNode;
import com.oracle.truffle.api.bytecode.BytecodeParser;
import com.oracle.truffle.api.bytecode.BytecodeRootNodes;
import com.oracle.truffle.api.bytecode.Instruction;
import com.oracle.truffle.api.bytecode.Instruction.Argument;
import com.oracle.truffle.api.bytecode.Instruction.Argument.Kind;
import com.oracle.truffle.api.bytecode.LocalVariable;
import com.oracle.truffle.api.frame.FrameSlotKind;
import com.oracle.truffle.api.frame.FrameSlotTypeException;
import com.oracle.truffle.api.frame.MaterializedFrame;
public class LocalsTest extends AbstractBasicInterpreterTest {
/** Parameterized constructor; {@code run} selects the interpreter configuration under test. */
public LocalsTest(TestRun run) {
  super(run);
}
/**
 * Checks locals introspection for root nodes with 0..99 locals, plus a 1000-local root to also
 * cover large local counts.
 */
@Test
public void testBasicLocals() {
  for (int i = 0; i < 100; i++) {
    assertBasicLocals(i);
  }
  assertBasicLocals(1000);
}
/**
 * Builds a root with {@code localCount} named locals (local i is initialized to the constant i),
 * returns local 0 (or the constant 0 when there are no locals), and then verifies the locals
 * metadata reported by the {@link BytecodeNode}: count at first/last bci, names, infos, and —
 * for block-scoped interpreters — the live ranges of each local.
 */
private void assertBasicLocals(int localCount) {
  // l0 = 0; l1 = 1; ...; l{n-1} = n-1;
  // return l0 (or the constant 0 when localCount == 0)
  BasicInterpreter root = parseNode("manyLocals" + localCount, b -> {
    b.beginRoot();

    BytecodeLocal[] locals = new BytecodeLocal[localCount];
    for (int i = 0; i < localCount; i++) {
      locals[i] = b.createLocal("name" + i, "info" + i);
    }

    for (int i = 0; i < localCount; i++) {
      b.beginStoreLocal(locals[i]);
      b.emitLoadConstant((long) i);
      b.endStoreLocal();
    }

    b.beginReturn();
    if (locals.length > 0) {
      b.emitLoadLocal(locals[0]);
    } else {
      b.emitLoadConstant(0L);
    }
    b.endReturn();
    b.endRoot();
  });

  BytecodeNode b = root.getBytecodeNode();
  Instruction last = b.getInstructionsAsList().getLast();

  // All locals are live at the first and last bytecode index.
  assertEquals(localCount, b.getLocalCount(0));
  int lastBci = last.getBytecodeIndex();
  assertEquals(localCount, b.getLocalCount(lastBci));

  assertEquals(localCount, b.getLocals().size());
  for (int i = 0; i < localCount; i++) {
    LocalVariable l = b.getLocals().get(i);
    if (run.hasBlockScoping()) {
      // Block scoping exposes live ranges; all locals span the whole root here.
      assertEquals(0, l.getStartIndex());
      assertEquals(last.getNextBytecodeIndex(), l.getEndIndex());
    } else {
      // Root scoping has no per-local ranges.
      assertEquals(-1, l.getStartIndex());
      assertEquals(-1, l.getEndIndex());
    }
    assertEquals("name" + i, l.getName());
    assertEquals("info" + i, l.getInfo());
    assertNotNull(l.toString());
  }

  assertEquals(0L, root.getCallTarget().call());
}
/**
 * Checks locals introspection with try/finally. With block scoping the finally body is emitted
 * multiple times (early-return, fallthrough and exceptional handler copies), so the same
 * source-level local {@code l2} appears several times in {@code getLocals()} with distinct
 * local indices.
 */
@Test
public void testFinally() {
  // @formatter:off
  // l0 = 1;
  // try {
  //   l1 = l0
  //   if (true) {
  //     return l0
  //   }
  // } finally {
  //   l2 = false
  // }
  // return l0;
  // @formatter:on
  BasicInterpreter root = parseNode("scopedLocals", b -> {
    b.beginRoot();

    BytecodeLocal l0 = b.createLocal("l0", null);

    // l0 = 1
    b.beginStoreLocal(l0);
    b.emitLoadConstant(1L);
    b.endStoreLocal();

    b.beginTryFinally(() -> {
      // finally block
      b.beginBlock();
      BytecodeLocal l2 = b.createLocal("l2", null);
      b.beginStoreLocal(l2);
      b.emitLoadConstant(false);
      b.endStoreLocal();
      b.endBlock();
    });

    // try block
    b.beginBlock();
    BytecodeLocal l1 = b.createLocal("l1", null);
    b.beginStoreLocal(l1);
    b.emitLoadLocal(l0);
    b.endStoreLocal();

    b.beginIfThen();
    b.emitLoadConstant(true);
    b.beginReturn();
    b.emitLoadLocal(l0);
    b.endReturn();
    b.endIfThen();

    b.emitLoadConstant(123L);
    b.endBlock();

    b.endTryFinally();

    b.beginReturn();
    b.emitLoadLocal(l0);
    b.endReturn();

    b.endRoot();
  });
  // Force the cached interpreter so type profiles can be collected on the first call.
  root.getBytecodeNode().setUncachedThreshold(0);
  assertEquals(1L, root.getCallTarget().call());

  BytecodeNode b = root.getBytecodeNode();
  List<LocalVariable> locals = b.getLocals();
  if (run.hasBlockScoping()) {
    assertEquals(6, locals.size());
    LocalVariable l0 = locals.get(0); // can be merged
    LocalVariable l1a = locals.get(1);
    LocalVariable l2a = locals.get(2); // early return handler
    LocalVariable l1b = locals.get(3);
    LocalVariable l2b = locals.get(4); // fallthrough handler
    LocalVariable l2c = locals.get(5); // exceptional handler

    assertEquals("l0", l0.getName());
    assertEquals("l1", l1a.getName());
    assertEquals("l1", l1b.getName());
    // Both l1 entries denote the same local (same slot and index) ...
    assertEquals(l1a.getLocalOffset(), l1b.getLocalOffset());
    assertEquals(l1a.getLocalIndex(), l1b.getLocalIndex());
    assertEquals("l2", l2a.getName());
    assertEquals("l2", l2b.getName());
    assertEquals("l2", l2c.getName());
    // ... while each finally-handler copy of l2 gets its own local index.
    assertTrue(l2a.getLocalIndex() != l2b.getLocalIndex());
    assertTrue(l2b.getLocalIndex() != l2c.getLocalIndex());

    if (run.hasBoxingElimination()) {
      assertEquals(FrameSlotKind.Long, l0.getTypeProfile());
      assertEquals(FrameSlotKind.Long, l1a.getTypeProfile());
      assertEquals(FrameSlotKind.Long, l1b.getTypeProfile());
      assertEquals(FrameSlotKind.Boolean, l2a.getTypeProfile());
      // Locals in finally handlers are unique. The fallthrough/exception handlers haven't
      // been hit.
      assertEquals(FrameSlotKind.Illegal, l2b.getTypeProfile());
      assertEquals(FrameSlotKind.Illegal, l2c.getTypeProfile());
    } else {
      assertNull(l0.getTypeProfile());
      assertNull(l1a.getTypeProfile());
      assertNull(l1b.getTypeProfile());
      assertNull(l2a.getTypeProfile());
      assertNull(l2b.getTypeProfile());
      assertNull(l2c.getTypeProfile());
    }

    // Use the load.constant consts to identify which block an instruction belongs to.
    for (Instruction instruction : b.getInstructions()) {
      if (!instruction.getName().equals("load.constant")) {
        continue;
      }
      for (Argument arg : instruction.getArguments()) {
        if (arg.getKind() != Kind.CONSTANT) {
          continue;
        }
        Object constant = arg.asConstant();
        if (constant == Long.valueOf(1L)) {
          // root block
          int bci = instruction.getBytecodeIndex();
          assertEquals(1, b.getLocalCount(bci));
          assertEquals("l0", b.getLocalName(bci, 0));
          assertNull(b.getLocalInfo(bci, 0));
        } else if (constant == Boolean.valueOf(true) || constant == Long.valueOf(123L)) {
          // try block
          int bci = instruction.getBytecodeIndex();
          assertEquals(2, b.getLocalCount(bci));
          assertEquals("l0", b.getLocalName(bci, 0));
          assertNull(b.getLocalInfo(bci, 0));
          assertEquals("l1", b.getLocalName(bci, 1));
          assertNull(b.getLocalInfo(bci, 1));
        } else if (constant == Boolean.valueOf(false)) {
          // finally block
          int bci = instruction.getBytecodeIndex();
          assertEquals(2, b.getLocalCount(bci));
          assertEquals("l0", b.getLocalName(bci, 0));
          assertNull(b.getLocalInfo(bci, 0));
          assertEquals("l2", b.getLocalName(bci, 1));
          assertNull(b.getLocalInfo(bci, 1));
        } else {
          fail("Unexpected constant " + constant);
        }
      }
    }
  } else {
    // Root scoping: l1 is emitted once, but each finally handler copy still declares its own l2.
    assertEquals(5, locals.size());
    LocalVariable l0 = locals.get(0);
    LocalVariable l1 = locals.get(1);
    LocalVariable l2a = locals.get(2); // early return handler
    LocalVariable l2b = locals.get(3); // fallthrough handler
    LocalVariable l2c = locals.get(4); // exceptional handler
    assertEquals("l0", l0.getName());
    assertEquals("l1", l1.getName());
    assertEquals("l2", l2a.getName());
    assertEquals("l2", l2b.getName());
    assertEquals("l2", l2c.getName());
  }
}
/**
 * Checks block-scoped locals introspection on a program with nested blocks: live ranges
 * (start/end bci) per local, frame-slot reuse between the sibling locals l1/l2, and the set of
 * visible locals (getLocalCount/getLocalName/getLocalInfo) at representative bytecode indices
 * inside each block. The instruction-index arithmetic below is tightly coupled to the emitted
 * bytecode layout, so the builder program must not be reordered.
 *
 * <p>Improvement over the previous version: reuses the already-fetched {@code instructions}
 * list instead of calling {@code b.getInstructionsAsList()} a second time.
 */
@Test
public void testScopedLocals() {
  // @formatter:off
  // // B0
  // l0 = 1;
  // { // B1
  //   l1 = l0
  // }
  // l2 = 42
  // { // B2
  //   l3 = l0
  //   { // B3
  //     l4 = l3
  //     l3 = l2
  //   }
  //   l0 = l3
  // }
  // return l0
  // @formatter:on
  BasicInterpreter root = parseNode("scopedLocals", b -> {
    b.beginRoot();

    // l0 = 1
    BytecodeLocal l0 = b.createLocal("l0", null);
    b.beginStoreLocal(l0);
    b.emitLoadConstant(1L);
    b.endStoreLocal();

    b.beginBlock();
    // l1 = l0
    BytecodeLocal l1 = b.createLocal("l1", null);
    b.beginStoreLocal(l1);
    b.emitLoadLocal(l0);
    b.endStoreLocal();
    b.endBlock();

    // l2 = 42
    BytecodeLocal l2 = b.createLocal("l2", null);
    b.beginStoreLocal(l2);
    b.emitLoadConstant(42L);
    b.endStoreLocal();

    b.beginBlock();
    // l3 = l0
    BytecodeLocal l3 = b.createLocal("l3", null);
    b.beginStoreLocal(l3);
    b.emitLoadLocal(l0);
    b.endStoreLocal();

    b.beginBlock();
    // l4 = l3
    BytecodeLocal l4 = b.createLocal("l4", null);
    b.beginStoreLocal(l4);
    b.emitLoadLocal(l3);
    b.endStoreLocal();
    // l3 = l2
    b.beginStoreLocal(l3);
    b.emitLoadLocal(l2);
    b.endStoreLocal();
    b.endBlock();

    // l0 = l3
    b.beginStoreLocal(l0);
    b.emitLoadLocal(l3);
    b.endStoreLocal();
    b.endBlock();

    // return l0
    b.beginReturn();
    b.emitLoadLocal(l0);
    b.endReturn();

    b.endRoot();
  });

  BytecodeNode b = root.getBytecodeNode();
  List<Instruction> instructions = b.getInstructionsAsList();
  Instruction last = instructions.getLast();
  int endBci = last.getNextBytecodeIndex();
  List<LocalVariable> locals = b.getLocals();
  assertEquals(5, locals.size());

  assertEquals(42L, root.getCallTarget().call());

  if (run.hasBlockScoping()) {
    // Live ranges: l0 spans the whole root, the block locals start at their declaring block.
    assertEquals(0, locals.get(0).getStartIndex());
    assertEquals(endBci, locals.get(0).getEndIndex());
    assertEquals("l0", locals.get(0).getName());

    assertEquals(instructions.get(2).getBytecodeIndex(), locals.get(1).getStartIndex());
    assertEquals(instructions.get(4).getBytecodeIndex(), locals.get(1).getEndIndex());
    assertEquals("l1", locals.get(1).getName());

    assertEquals(instructions.get(5).getBytecodeIndex(), locals.get(2).getStartIndex());
    assertEquals(endBci, locals.get(2).getEndIndex());
    assertEquals("l2", locals.get(2).getName());

    // l1 and l2 should use the same frame slot.
    assertEquals(locals.get(1).getLocalOffset(), locals.get(2).getLocalOffset());

    assertEquals(instructions.get(7).getBytecodeIndex(), locals.get(3).getStartIndex());
    assertEquals(instructions.get(16).getBytecodeIndex(), locals.get(3).getEndIndex());
    assertEquals("l3", locals.get(3).getName());

    assertEquals(instructions.get(9).getBytecodeIndex(), locals.get(4).getStartIndex());
    assertEquals(instructions.get(13).getBytecodeIndex(), locals.get(4).getEndIndex());
    assertEquals("l4", locals.get(4).getName());

    int bci;

    // B0
    bci = 0;
    assertEquals(1, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertNull(b.getLocalInfo(bci, 0));

    bci = instructions.get(4).getBytecodeIndex();
    assertEquals(1, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertNull(b.getLocalInfo(bci, 0));

    bci = instructions.get(5).getBytecodeIndex();
    assertEquals(2, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertEquals("l2", b.getLocalName(bci, 1));
    assertNull(b.getLocalInfo(bci, 0));
    assertNull(b.getLocalInfo(bci, 1));

    bci = last.getBytecodeIndex();
    assertEquals(2, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertEquals("l2", b.getLocalName(bci, 1));
    assertNull(b.getLocalInfo(bci, 0));
    assertNull(b.getLocalInfo(bci, 1));

    // B1
    bci = instructions.get(1).getBytecodeIndex();
    assertEquals(1, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertNull(b.getLocalInfo(bci, 0));

    bci = instructions.get(2).getBytecodeIndex();
    assertEquals(2, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertEquals("l1", b.getLocalName(bci, 1));
    assertNull(b.getLocalInfo(bci, 0));
    assertNull(b.getLocalInfo(bci, 1));

    bci = instructions.get(4).getBytecodeIndex();
    assertEquals(1, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertNull(b.getLocalInfo(bci, 0));

    // B2
    bci = instructions.get(6).getBytecodeIndex();
    assertEquals(2, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertEquals("l2", b.getLocalName(bci, 1));
    assertNull(b.getLocalInfo(bci, 0));
    assertNull(b.getLocalInfo(bci, 1));

    bci = instructions.get(8).getBytecodeIndex();
    assertEquals(3, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertEquals("l2", b.getLocalName(bci, 1));
    assertEquals("l3", b.getLocalName(bci, 2));
    assertNull(b.getLocalInfo(bci, 0));
    assertNull(b.getLocalInfo(bci, 1));
    assertNull(b.getLocalInfo(bci, 2));

    bci = instructions.get(15).getBytecodeIndex();
    assertEquals(3, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertEquals("l2", b.getLocalName(bci, 1));
    assertEquals("l3", b.getLocalName(bci, 2));
    assertNull(b.getLocalInfo(bci, 0));
    assertNull(b.getLocalInfo(bci, 1));
    assertNull(b.getLocalInfo(bci, 2));

    bci = instructions.get(16).getBytecodeIndex();
    assertEquals(2, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertEquals("l2", b.getLocalName(bci, 1));
    assertNull(b.getLocalInfo(bci, 0));
    assertNull(b.getLocalInfo(bci, 1));

    // B3
    bci = instructions.get(8).getBytecodeIndex();
    assertEquals(3, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertEquals("l2", b.getLocalName(bci, 1));
    assertEquals("l3", b.getLocalName(bci, 2));
    assertNull(b.getLocalInfo(bci, 0));
    assertNull(b.getLocalInfo(bci, 1));
    assertNull(b.getLocalInfo(bci, 2));

    bci = instructions.get(9).getBytecodeIndex();
    assertEquals(4, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertEquals("l2", b.getLocalName(bci, 1));
    assertEquals("l3", b.getLocalName(bci, 2));
    assertEquals("l4", b.getLocalName(bci, 3));
    assertNull(b.getLocalInfo(bci, 0));
    assertNull(b.getLocalInfo(bci, 1));
    assertNull(b.getLocalInfo(bci, 2));
    assertNull(b.getLocalInfo(bci, 3));

    bci = instructions.get(13).getBytecodeIndex();
    assertEquals(3, b.getLocalCount(bci));
    assertEquals("l0", b.getLocalName(bci, 0));
    assertEquals("l2", b.getLocalName(bci, 1));
    assertEquals("l3", b.getLocalName(bci, 2));
    assertNull(b.getLocalInfo(bci, 0));
    assertNull(b.getLocalInfo(bci, 1));
    assertNull(b.getLocalInfo(bci, 2));
  }
}
/**
 * Two sibling blocks each declare locals named l1/l2 but store differently-typed values
 * (String vs. long). Verifies that root scoping assigns distinct frame offsets while block
 * scoping reuses offsets across sibling blocks, that local indices are always distinct, and that
 * boxing-eliminating interpreters collect independent per-local type profiles.
 */
@Test
public void testScopedLocals2() {
  // @formatter:off
  // // B0
  // l0 = 42L;
  // {
  //   l1 = ""
  //   l2 = 42L
  // }
  // {
  //   l1 = 42L
  //   l2 = ""
  // }
  // return l0
  // @formatter:on
  BasicInterpreter root = parseNode("scopedLocals2", b -> {
    b.beginRoot();

    BytecodeLocal l0 = b.createLocal("l0", null);
    b.beginStoreLocal(l0);
    b.emitLoadConstant(42L);
    b.endStoreLocal();

    b.beginBlock();
    BytecodeLocal l1 = b.createLocal("l1", null);
    b.beginStoreLocal(l1);
    b.emitLoadConstant("");
    b.endStoreLocal();

    BytecodeLocal l2 = b.createLocal("l2", null);
    b.beginStoreLocal(l2);
    b.emitLoadConstant(42L);
    b.endStoreLocal();
    b.endBlock();

    b.beginBlock();
    l1 = b.createLocal("l1", null);
    b.beginStoreLocal(l1);
    b.emitLoadConstant(42L);
    b.endStoreLocal();

    l2 = b.createLocal("l2", null);
    b.beginStoreLocal(l2);
    b.emitLoadConstant("");
    b.endStoreLocal();
    b.endBlock();

    b.beginReturn();
    b.emitLoadLocal(l0);
    b.endReturn();

    b.endRoot();
  });

  List<LocalVariable> locals = root.getBytecodeNode().getLocals();
  assertEquals(5, locals.size());

  LocalVariable l0 = locals.get(0);
  LocalVariable l1a = locals.get(1);
  LocalVariable l2a = locals.get(2);
  LocalVariable l1b = locals.get(3);
  LocalVariable l2b = locals.get(4);

  assertEquals("l0", l0.getName());
  assertEquals("l1", l1a.getName());
  assertEquals("l2", l2a.getName());
  assertEquals("l1", l1b.getName());
  assertEquals("l2", l2b.getName());

  assertNull(l0.getInfo());
  assertNull(l1a.getInfo());
  assertNull(l2a.getInfo());
  assertNull(l1b.getInfo());
  assertNull(l2b.getInfo());

  assertNotNull(l0.toString());
  assertNotNull(l1a.toString());
  assertNotNull(l2a.toString());
  assertNotNull(l1b.toString());
  assertNotNull(l2b.toString());

  // No calls yet, so no type profiles have been collected.
  assertNull(l0.getTypeProfile());
  assertNull(l1a.getTypeProfile());
  assertNull(l2a.getTypeProfile());
  assertNull(l1b.getTypeProfile());
  assertNull(l2b.getTypeProfile());

  if (run.hasRootScoping()) {
    // Root scoping: every local gets its own frame slot.
    assertEquals(0, l0.getLocalOffset());
    assertEquals(1, l1a.getLocalOffset());
    assertEquals(2, l2a.getLocalOffset());
    assertEquals(3, l1b.getLocalOffset());
    assertEquals(4, l2b.getLocalOffset());
  } else {
    // Block scoping: the second block reuses the slots of the (dead) first block.
    assertEquals(0, l0.getLocalOffset());
    assertEquals(1, l1a.getLocalOffset());
    assertEquals(2, l2a.getLocalOffset());
    assertEquals(1, l1b.getLocalOffset());
    assertEquals(2, l2b.getLocalOffset());
  }

  // Local indices are unique regardless of scoping.
  assertEquals(0, l0.getLocalIndex());
  assertEquals(1, l1a.getLocalIndex());
  assertEquals(2, l2a.getLocalIndex());
  assertEquals(3, l1b.getLocalIndex());
  assertEquals(4, l2b.getLocalIndex());

  root.getBytecodeNode().setUncachedThreshold(0);
  assertEquals(42L, root.getCallTarget().call());

  // re-read locals as old
  locals = root.getBytecodeNode().getLocals();
  l0 = locals.get(0);
  l1a = locals.get(1);
  l2a = locals.get(2);
  l1b = locals.get(3);
  l2b = locals.get(4);

  if (run.hasBoxingElimination()) {
    assertEquals(FrameSlotKind.Long, l0.getTypeProfile());
    assertEquals(FrameSlotKind.Object, l1a.getTypeProfile());
    assertEquals(FrameSlotKind.Long, l2a.getTypeProfile());
    assertEquals(FrameSlotKind.Long, l1b.getTypeProfile());
    assertEquals(FrameSlotKind.Object, l2b.getTypeProfile());
  } else {
    // no profile collected if not boxing-eliminated
    assertNull(l0.getTypeProfile());
    assertNull(l1a.getTypeProfile());
    assertNull(l2a.getTypeProfile());
    assertNull(l1b.getTypeProfile());
    assertNull(l2b.getTypeProfile());
  }

  // A second call must not change the collected profiles.
  assertEquals(42L, root.getCallTarget().call());

  if (run.hasBoxingElimination()) {
    assertEquals(FrameSlotKind.Long, l0.getTypeProfile());
    assertEquals(FrameSlotKind.Object, l1a.getTypeProfile());
    assertEquals(FrameSlotKind.Long, l2a.getTypeProfile());
    assertEquals(FrameSlotKind.Long, l1b.getTypeProfile());
    assertEquals(FrameSlotKind.Object, l2b.getTypeProfile());
  } else {
    // no profile collected if not boxing-eliminated
    assertNull(l0.getTypeProfile());
    assertNull(l1a.getTypeProfile());
    assertNull(l2a.getTypeProfile());
    assertNull(l1b.getTypeProfile());
    assertNull(l2b.getTypeProfile());
  }
}
/**
 * Verifies that a materialized store from an inner root into an outer root's frame keeps the
 * outer local's type profile consistent: a same-typed store preserves the profiled kind, a
 * differently-typed store generalizes it to Object, and the outer root still executes correctly
 * afterwards.
 */
@Test
public void testMaterializedAccessUpdatesTag() {
  // @formatter:off
  // def outer(materializeFrame):
  //   x = 42L
  //   def inner(newValue):
  //     x = newValue;
  //   return materializeFrame ? materialize() : x
  // @formatter:on
  BytecodeRootNodes<BasicInterpreter> roots = createNodes(BytecodeConfig.DEFAULT, b -> {
    b.beginRoot();
    BytecodeLocal x = b.createLocal("x", null);

    b.beginStoreLocal(x);
    b.emitLoadConstant(42L);
    b.endStoreLocal();

    // inner: stores arg1 into x through the materialized outer frame passed as arg0.
    b.beginRoot();
    b.beginStoreLocalMaterialized(x);
    b.emitLoadArgument(0);
    b.emitLoadArgument(1);
    b.endStoreLocalMaterialized();
    b.endRoot();

    b.beginReturn();
    b.beginConditional();
    b.emitLoadArgument(0);
    b.emitMaterializeFrame();
    b.emitLoadLocal(x);
    b.endConditional();
    b.endReturn();

    b.endRoot();
  });

  BasicInterpreter outer = roots.getNode(0);
  BasicInterpreter inner = roots.getNode(1);

  List<LocalVariable> locals = outer.getBytecodeNode().getLocals();
  assertEquals(1, locals.size());
  LocalVariable x = locals.get(0);
  assertEquals("x", x.getName());
  assertNull(x.getTypeProfile());

  // force cached
  outer.getBytecodeNode().setUncachedThreshold(0);
  assertEquals(42L, outer.getCallTarget().call(false));
  if (run.hasBoxingElimination()) {
    // The tag should be updated.
    assertEquals(FrameSlotKind.Long, outer.getBytecodeNode().getLocals().get(0).getTypeProfile());
  } else {
    assertNull(outer.getBytecodeNode().getLocals().get(0).getTypeProfile());
  }

  MaterializedFrame outerFrame = (MaterializedFrame) outer.getCallTarget().call(true);
  if (run.hasBoxingElimination()) {
    // The tag should stay the same.
    inner.getCallTarget().call(outerFrame, 123L);
    assertEquals(FrameSlotKind.Long, outer.getBytecodeNode().getLocals().get(0).getTypeProfile());

    // If we use a different type, it should reset the tag to Object.
    inner.getCallTarget().call(outerFrame, "hello");
    assertEquals(FrameSlotKind.Object, outer.getBytecodeNode().getLocals().get(0).getTypeProfile());
  } else {
    assertNull(outer.getBytecodeNode().getLocals().get(0).getTypeProfile());
  }

  // Outer should still execute even with updated tags.
  assertEquals(42L, outer.getCallTarget().call(false));
}
/**
 * Reads locals that were never written. Depending on the configuration under test
 * ({@code run.getDefaultLocalValue()}), such reads either throw {@link FrameSlotTypeException}
 * (no default configured) or yield the configured default value — in both the uncached and the
 * cached interpreter. With block scoping this also covers reading a slot ("l1") that reuses the
 * frame slot of a previously-written, now out-of-scope local ("l0").
 */
@Test
public void testIllegalOrDefault() {
  // @formatter:off
  // // B0
  // result;
  // {
  //   var l0;
  //   if (arg0) {
  //     result = l0   // l0 may be uninitialized here
  //   } else {
  //     l0 = 42L
  //   }
  // }
  // {
  //   var l1;
  //   result = l1;    // l1 is always uninitialized
  // }
  // return result
  // @formatter:on
  BasicInterpreter root = parseNode("illegalDefaults", b -> {
    b.beginRoot();

    BytecodeLocal result = b.createLocal("result", null);

    b.beginBlock();
    BytecodeLocal l = b.createLocal("l0", null);
    b.beginIfThenElse();
    b.emitLoadArgument(0);
    b.beginStoreLocal(result);
    b.emitLoadLocal(l);
    b.endStoreLocal();
    b.beginStoreLocal(l);
    b.emitLoadConstant(42L);
    b.endStoreLocal();
    b.endIfThenElse();
    b.endBlock();

    b.beginBlock();
    l = b.createLocal("l1", null);
    b.beginStoreLocal(result);
    b.emitLoadLocal(l);
    b.endStoreLocal();
    b.endBlock();

    b.beginReturn();
    b.emitLoadLocal(result);
    b.endReturn();

    b.endRoot();
  });

  Object defaultLocal = this.run.getDefaultLocalValue();
  if (defaultLocal == null) {
    // No default value: reading an uninitialized local must throw, uncached and cached alike.
    assertThrows(FrameSlotTypeException.class, () -> {
      root.getCallTarget().call(false);
    });
    assertThrows(FrameSlotTypeException.class, () -> {
      root.getCallTarget().call(true);
    });
    root.getBytecodeNode().setUncachedThreshold(0);
    assertThrows(FrameSlotTypeException.class, () -> {
      root.getCallTarget().call(false);
    });
    assertThrows(FrameSlotTypeException.class, () -> {
      root.getCallTarget().call(true);
    });
  } else {
    // Uninitialized reads observe the configured default value.
    assertSame(defaultLocal, root.getCallTarget().call(true));
    assertSame(defaultLocal, root.getCallTarget().call(false));
    root.getBytecodeNode().setUncachedThreshold(0);
    assertSame(defaultLocal, root.getCallTarget().call(true));
    assertSame(defaultLocal, root.getCallTarget().call(false));
  }
}
    /**
     * Asserts that building a root with the given parser fails at parse time with an
     * {@link IllegalArgumentException}.
     */
    private <T extends BasicInterpreterBuilder> void assertParseFailure(BytecodeParser<T> parser) {
        assertThrows(IllegalArgumentException.class, () -> parseNode("invalid", parser));
    }
    /**
     * Builds two sibling roots where the second root accesses local {@code x} created by the
     * first (already finished) root. Cross-root accesses like this are invalid and are
     * expected to be rejected at parse time.
     */
    private static <T extends BasicInterpreterBuilder> BytecodeParser<T> siblingRootsTest(BiConsumer<T, BytecodeLocal> accessGenerator) {
        return b -> {
            b.beginRoot();
            BytecodeLocal x = b.createLocal("x", null);
            b.emitLoadNull();
            b.endRoot();
            b.beginRoot();
            b.createLocal("y", null);
            // invalid: x belongs to the first (sibling) root
            accessGenerator.accept(b, x);
            b.endRoot();
        };
    }
    /**
     * Builds a nested root that directly accesses local {@code x} of its enclosing root.
     * Regular (non-materialized) accesses across root boundaries are invalid.
     */
    private static <T extends BasicInterpreterBuilder> BytecodeParser<T> nestedRootsInnerAccessTest(BiConsumer<T, BytecodeLocal> accessGenerator) {
        return b -> {
            b.beginRoot();
            BytecodeLocal x = b.createLocal("x", null);
            b.beginRoot(); // inner
            b.createLocal("y", null);
            // invalid (for regular accesses): x belongs to the outer root
            accessGenerator.accept(b, x);
            b.endRoot();
            b.endRoot();
        };
    }
    /**
     * Builds an outer root that accesses local {@code y} declared by its (already finished)
     * nested root. Invalid: y does not exist in the outer root.
     */
    private static <T extends BasicInterpreterBuilder> BytecodeParser<T> nestedRootsOuterAccessTest(BiConsumer<T, BytecodeLocal> accessGenerator) {
        return b -> {
            b.beginRoot();
            b.createLocal("x", null);
            b.beginRoot(); // inner
            BytecodeLocal y = b.createLocal("y", null);
            b.endRoot();
            // invalid: y belongs to the (already closed) inner root
            accessGenerator.accept(b, y);
            b.endRoot();
        };
    }
    /**
     * Builds a single root that accesses local {@code x} after the block declaring it was
     * closed. Only invalid when the interpreter uses block scoping (see callers).
     */
    private static <T extends BasicInterpreterBuilder> BytecodeParser<T> outOfScopeTest(BiConsumer<T, BytecodeLocal> accessGenerator) {
        return b -> {
            b.beginRoot();
            b.beginBlock();
            BytecodeLocal x = b.createLocal("x", null);
            b.endBlock();
            b.beginBlock();
            // x's declaring block is closed at this point
            accessGenerator.accept(b, x);
            b.endBlock();
            b.endRoot();
        };
    }
    /** Emits a plain LoadLocal of {@code local}. */
    private static <T extends BasicInterpreterBuilder> void loadLocal(T b, BytecodeLocal local) {
        b.emitLoadLocal(local);
    }
    /** Emits a StoreLocal writing null into {@code local}. */
    private static <T extends BasicInterpreterBuilder> void storeLocal(T b, BytecodeLocal local) {
        b.beginStoreLocal(local);
        b.emitLoadNull();
        b.endStoreLocal();
    }
    /** Emits a TeeLocal writing null into {@code local}. */
    private static <T extends BasicInterpreterBuilder> void teeLocal(T b, BytecodeLocal local) {
        b.beginTeeLocal(local);
        b.emitLoadNull();
        b.endTeeLocal();
    }
    /** Emits a TeeLocalRange writing null into the one-element range {@code [local]}. */
    private static <T extends BasicInterpreterBuilder> void teeLocalRange(T b, BytecodeLocal local) {
        b.beginTeeLocalRange(new BytecodeLocal[]{local});
        b.emitLoadNull();
        b.endTeeLocalRange();
    }
    @Test
    public void testInvalidLocalAccesses() {
        // Every regular (non-materialized) access to a local of a different root
        // must be rejected at parse time, for all four access kinds.
        assertParseFailure(siblingRootsTest(LocalsTest::loadLocal));
        assertParseFailure(siblingRootsTest(LocalsTest::storeLocal));
        assertParseFailure(siblingRootsTest(LocalsTest::teeLocal));
        assertParseFailure(siblingRootsTest(LocalsTest::teeLocalRange));
        assertParseFailure(nestedRootsInnerAccessTest(LocalsTest::loadLocal));
        assertParseFailure(nestedRootsInnerAccessTest(LocalsTest::storeLocal));
        assertParseFailure(nestedRootsInnerAccessTest(LocalsTest::teeLocal));
        assertParseFailure(nestedRootsInnerAccessTest(LocalsTest::teeLocalRange));
        assertParseFailure(nestedRootsOuterAccessTest(LocalsTest::loadLocal));
        assertParseFailure(nestedRootsOuterAccessTest(LocalsTest::storeLocal));
        assertParseFailure(nestedRootsOuterAccessTest(LocalsTest::teeLocal));
        assertParseFailure(nestedRootsOuterAccessTest(LocalsTest::teeLocalRange));
        if (run.hasBlockScoping()) {
            // With block scoping, accessing a local after its declaring block was
            // closed is also a parse-time failure.
            assertParseFailure(outOfScopeTest(LocalsTest::loadLocal));
            assertParseFailure(outOfScopeTest(LocalsTest::storeLocal));
            assertParseFailure(outOfScopeTest(LocalsTest::teeLocal));
            assertParseFailure(outOfScopeTest(LocalsTest::teeLocalRange));
        }
    }
    /**
     * Builds a nested root performing a (materialized) access to local {@code x} whose
     * declaring block was already closed when the nested root was created. Invalid for
     * block-scoping interpreters (see callers).
     */
    private static <T extends BasicInterpreterBuilder> BytecodeParser<T> outOfScopeDifferentRootsTest(BiConsumer<T, BytecodeLocal> accessGenerator) {
        return b -> {
            b.beginRoot();
            b.beginBlock();
            BytecodeLocal x = b.createLocal("x", null);
            b.endBlock();
            b.beginBlock();
            b.createLocal("y", null);
            b.beginRoot(); // x is out of scope when inner root declared
            accessGenerator.accept(b, x);
            b.endRoot();
            b.endBlock();
            b.endRoot();
        };
    }
    /** Emits a materialized LoadLocal of {@code local}, passing the current frame. */
    private static <T extends BasicInterpreterBuilder> void loadLocalMaterialized(T b, BytecodeLocal local) {
        b.beginLoadLocalMaterialized(local);
        b.emitMaterializeFrame(); // uses current frame
        b.endLoadLocalMaterialized();
    }
    /** Emits a materialized StoreLocal writing null into {@code local}, passing the current frame. */
    private static <T extends BasicInterpreterBuilder> void storeLocalMaterialized(T b, BytecodeLocal local) {
        b.beginStoreLocalMaterialized(local);
        b.emitMaterializeFrame(); // uses current frame
        b.emitLoadNull();
        b.endStoreLocalMaterialized();
    }
    @Test
    public void testInvalidMaterializedLocalAccesses() {
        // Materialized accesses to a sibling root's local are still parse errors.
        assertParseFailure(siblingRootsTest(LocalsTest::loadLocalMaterialized));
        assertParseFailure(siblingRootsTest(LocalsTest::storeLocalMaterialized));
        // At run time we should fail if the wrong frame is passed.
        // (getNode(1) is the inner root; calling it passes its own frame, which
        // does not contain the outer root's local.)
        BasicInterpreter root1 = createNodes(BytecodeConfig.DEFAULT, nestedRootsInnerAccessTest(LocalsTest::loadLocalMaterialized)).getNode(1);
        assertThrows(IllegalArgumentException.class, () -> root1.getCallTarget().call());
        BasicInterpreter root2 = createNodes(BytecodeConfig.DEFAULT, nestedRootsInnerAccessTest(LocalsTest::storeLocalMaterialized)).getNode(1);
        assertThrows(IllegalArgumentException.class, () -> root2.getCallTarget().call());
        if (run.hasBlockScoping()) {
            // Statically out-of-scope materialized accesses are parse errors.
            assertParseFailure(outOfScopeTest(LocalsTest::loadLocalMaterialized));
            assertParseFailure(outOfScopeTest(LocalsTest::storeLocalMaterialized));
            assertParseFailure(outOfScopeDifferentRootsTest(LocalsTest::loadLocalMaterialized));
            assertParseFailure(outOfScopeDifferentRootsTest(LocalsTest::storeLocalMaterialized));
            if (run.storesBciInFrame()) {
                // At run time we should fail if the local is not in scope.
                BytecodeRootNodes<BasicInterpreter> roots = createNodes(BytecodeConfig.DEFAULT, b -> {
                    b.beginRoot();
                    b.beginBlock();
                    BytecodeLocal x = b.createLocal("x", null);
                    b.beginStoreLocal(x);
                    b.emitLoadConstant(42L);
                    b.endStoreLocal();
                    b.beginRoot(); // x is statically in scope
                    b.beginLoadLocalMaterialized(x);
                    b.emitLoadArgument(0);
                    b.endLoadLocalMaterialized();
                    b.endRoot();
                    b.beginRoot(); // x is statically in scope
                    b.beginStoreLocalMaterialized(x);
                    b.emitLoadArgument(0);
                    b.emitLoadNull();
                    b.endStoreLocalMaterialized();
                    b.endRoot();
                    b.endBlock();
                    b.beginBlock();
                    b.createLocal("y", null);
                    b.emitMaterializeFrame(); // x is out of scope in this frame
                    b.endBlock();
                    b.endRoot();
                });
                // The outer root returns a frame materialized after x's block closed;
                // both inner roots must reject it at run time.
                MaterializedFrame outerFrame = (MaterializedFrame) roots.getNode(0).getCallTarget().call();
                assertThrows(IllegalArgumentException.class, () -> roots.getNode(1).getCallTarget().call(outerFrame));
                assertThrows(IllegalArgumentException.class, () -> roots.getNode(2).getCallTarget().call(outerFrame));
            }
        }
    }
}
|
apache/stanbol | 38,375 | entityhub/yard/solr/src/main/java/org/apache/stanbol/entityhub/yard/solr/model/IndexValueFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.stanbol.entityhub.yard.solr.model;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import org.apache.stanbol.entityhub.core.model.InMemoryValueFactory;
import org.apache.stanbol.entityhub.servicesapi.model.Reference;
import org.apache.stanbol.entityhub.servicesapi.model.Text;
import org.apache.stanbol.entityhub.servicesapi.model.ValueFactory;
import org.apache.stanbol.entityhub.yard.solr.defaults.IndexDataTypeEnum;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class provides methods to convert java objects to {@link IndexValue} and vice versa.
* <p>
* Implementation Note: This class needs to be thread save.
*
* @author Rupert Westenthaler
*/
public class IndexValueFactory {
    // Logger for conversion warnings (e.g. unparsable date values).
    private static Logger log = LoggerFactory.getLogger(IndexValueFactory.class);
    // ValueFactory used by the default Reference and Text converters to create
    // Entityhub model instances.
    private static ValueFactory valueFactory = InMemoryValueFactory.getInstance();
    // Shared singleton returned by getInstance().
    private static IndexValueFactory instance = new IndexValueFactory();
    static {
        // register the default converters
        instance.registerConverter(new BigDecimalConverter());
        instance.registerConverter(new BigIntegerConverter());
        instance.registerConverter(new DateConverter());
        instance.registerConverter(new BooleanConverter());
        instance.registerConverter(new DoubleConverter());
        instance.registerConverter(new FloatConverter());
        instance.registerConverter(new IntegerConverter());
        instance.registerConverter(new LongConverter());
        instance.registerConverter(new ReferenceConverter(valueFactory));
        instance.registerConverter(new StringConverter());
        instance.registerConverter(new TextConverter(valueFactory));
    }
    /**
     * Get the singleton <code>IndexValueFactory</code> with all default converters
     * (see static initializer) already registered.
     *
     * @return the <code>IndexValueFactory</code> instance
     */
    public static IndexValueFactory getInstance() {
        return instance;
    }
// TODO: add support for IndexTypeConverter
// private Map<IndexType,TypeConverter<?>> indexTypeConverters =
// new HashMap<IndexType, TypeConverter<?>>();
    /**
     * Holds the java class to {@link TypeConverter} mapping for all converters registered for a Java Class.
     * <p>
     * NOTE: this implementation distinguishes between classed and interfaces, because for Classes a simple
     * get lookup in the Map can be used while for Interfaces we need to Iterate over the entries of the Map
     * and check with {@link Class#isAssignableFrom(Class)}.
     */
    private Map<Class<?>,TypeConverter<?>> javaClassConverters = Collections
            .synchronizedMap(new HashMap<Class<?>,TypeConverter<?>>());
    /**
     * Holds the java interface to {@link TypeConverter} mappings for all converters registered for a Java
     * Interface
     * <p>
     * NOTE: this implementation distinguishes between classed and interfaces, because for Classes a simple
     * get lookup in the Map can be used while for Interfaces we need to Iterate over the entries of the Map
     * and check with {@link Class#isAssignableFrom(Class)}.
     * <p>
     * NOTE(review): this map instance is never mutated; registerConverter/removeConverter
     * build a copy and swap this reference. The field is not volatile, so visibility of
     * such swaps to other threads is not guaranteed by the JMM — confirm this is intended.
     */
    private Map<Class<?>,TypeConverter<?>> javaInterfaceConverters = new HashMap<Class<?>,TypeConverter<?>>();
/**
* Registers a converter to this factory. Note that only one converter per java type can be registered
*
* @see TypeConverter#getJavaType()
* @param converter
* the converter to be registered
*/
    public void registerConverter(TypeConverter<?> converter) {
        // silently ignore null (nothing to register)
        if (converter == null) {
            return;
        }
        Class<?> javaType = converter.getJavaType();
        if (javaType.isInterface()) {
            // NOTE: To ensure thread save iterations over Entries of this Map
            // create new map instance, add to the new instance and replace reference
            // ... i know this is slow, but such calls are very uncommon
            Map<Class<?>,TypeConverter<?>> javaInterfaceConverterMap = new HashMap<Class<?>,TypeConverter<?>>(
                    this.javaInterfaceConverters);
            javaInterfaceConverterMap.put(javaType, converter);
            // TODO: add support for IndexTypeConverter
            this.javaInterfaceConverters = javaInterfaceConverterMap;
        } else {
            // there are no Iterations over this Map!
            // (the map is synchronized, so a direct put is safe)
            javaClassConverters.put(javaType, converter);
        }
    }
/**
* Removes the converter for the parsed java type
*
* @param type
* the java type
* @return the removed converter or <code>null</code> if none was registered for the parsed type.
*/
    @SuppressWarnings("unchecked")
    public <T> TypeConverter<T> removeConverter(Class<T> type) {
        if (type == null) {
            return null;
        }
        TypeConverter<T> converter;
        if (type.isInterface()) {
            if (javaInterfaceConverters.containsKey(type)) {
                // create new map instance, remove to the converter and replace reference
                // ... i know this is slow, but such calls are very uncommon
                Map<Class<?>,TypeConverter<?>> javaInterfaceConverterMap = new HashMap<Class<?>,TypeConverter<?>>(
                        this.javaInterfaceConverters);
                converter = (TypeConverter<T>) javaInterfaceConverterMap.remove(type);
                this.javaInterfaceConverters = javaInterfaceConverterMap;
            } else {
                converter = null;
            }
        } else {
            // class converters live in a synchronized map; remove directly
            converter = (TypeConverter<T>) javaClassConverters.remove(type);
        }
        return converter;
    }
/**
* Creates the value as used to index the parsed object
*
* @param value
* the value to be indexed
* @return the index representation of the parsed value
* @throws NoConverterException
* thrown if <code>value</code> is of an invalid type
* @throws IllegalArgumentException
* if the parsed value is null
*/
    @SuppressWarnings("unchecked")
    public IndexValue createIndexValue(Object value) throws NoConverterException, IllegalArgumentException {
        if (value == null) {
            throw new IllegalArgumentException("Parameter value MUST NOT be NULL!");
        }
        // first try to get the class and find a converter registered for a class
        TypeConverter<Object> converter = (TypeConverter<Object>) javaClassConverters.get(value.getClass());
        if (converter != null) {
            return converter.createIndexValue(value);
        }
        // if not successful we need still to search for converters registered for interfaces
        // (first matching interface wins; HashMap iteration order is unspecified)
        for (Entry<Class<?>,TypeConverter<?>> entry : javaInterfaceConverters.entrySet()) {
            if (entry.getKey().isAssignableFrom(value.getClass())) {
                return ((TypeConverter<Object>) entry.getValue()).createIndexValue(value);
            }
        }
        throw new NoConverterException(value.getClass());
    }
/**
* Converts a IndexValue instance to an instance of the specified class
*
* @param <T>
* @param type
* the <code>Class</code> of the returned object
* @param indexValue
* the index value instance
* @return a java object representing the value of the index value
* @throws NoConverterException
* thrown if <code>type</code> is unsupported
* @throws UnsupportedIndexTypeException
* if the {@link IndexDataType} of the parsed {@link IndexValue} is not supported by the
* registered converter
* @throws IllegalArgumentException
* if any of the two parameter is <code>null</code>
*/
public <T> T createValue(Class<T> type, IndexValue indexValue) throws NoConverterException,
UnsupportedIndexTypeException,
IllegalArgumentException {
return createValue(type, indexValue.getType(), indexValue.getType(), indexValue.getLanguage());
}
/**
* Converts a IndexValue instance to an instance of the specified class
*
* @param <T>
* @param javaType
* the requested java type
* @param indexType
* the index type
* @param indexValue
* the value in the index
* @param language
* the language of the value in the index
* @return a java object representing the value of the index value
* @throws NoConverterException
* thrown if <code>type</code> is unsupported
* @throws UnsupportedIndexTypeException
* if the {@link IndexDataType} of the parsed {@link IndexValue} is not supported by the
* registered converter
* @throws IllegalArgumentException
* if any of the two parameter is <code>null</code>
*/
    @SuppressWarnings("unchecked")
    public <T> T createValue(Class<T> javaType, IndexDataType indexType, Object indexValue, String language) throws NoConverterException,
                                                                                                            UnsupportedIndexTypeException,
                                                                                                            IllegalArgumentException {
        if (javaType == null) {
            throw new IllegalArgumentException("Parameter Class<T> type MUST NOT be NULL");
        }
        if (indexValue == null) {
            throw new IllegalArgumentException("Parameter IndexValue MUST NOT be NULL");
        }
        // search interface converter map if the parsed type is an interface
        // (direct get is sufficient here: converters are registered under exactly
        // the java type they report via getJavaType())
        TypeConverter<T> converter = (TypeConverter<T>) (javaType.isInterface() ? javaInterfaceConverters
                .get(javaType) : javaClassConverters.get(javaType));
        if (converter != null) {
            return converter.createObject(indexType, indexValue, language);
        } else {
            throw new NoConverterException(javaType);
        }
    }
// TODO: add support for IndexTypeConverter
// /**
// * Converts a IndexValue instance to an java object. The type of the java
// * object.
// * @param indexValue the index value instance
// * @return a java object representing the value of the index value
// * @throws NoConverterException if no converter for the index value is registered
// */
// public Object createObject(IndexValue indexValue) throws NoConverterException {
//
// }
/*
* ==== Internal Classes for the default converter Implementations ====
*/
public static class DateConverter implements TypeConverter<Date> {
private static final DateTimeFormatter XML_DATE_TIME_FORMAT = ISODateTimeFormat.dateTime().withZone(
DateTimeZone.UTC);
private static final DateTimeFormatter XML_DATE_TIME_FORMAT_noMillis = ISODateTimeFormat
.dateTimeNoMillis().withZone(DateTimeZone.UTC);
public static final IndexDataType INDEX_TYPE = IndexDataTypeEnum.DATE.getIndexType();
@Override
public IndexValue createIndexValue(Date value) {
if(value == null){
return null;
}
return new IndexValue(XML_DATE_TIME_FORMAT.print(value.getTime()), INDEX_TYPE);
}
@Override
public Date createObject(IndexValue indexValue) {
if (indexValue == null) {
return null;
}
return createObject(indexValue.getType(), indexValue, indexValue.getLanguage());
}
@Override
public Class<Date> getJavaType() {
return Date.class;
}
@Override
public IndexDataType getIndexType() {
return INDEX_TYPE;
}
@Override
public Date createObject(IndexDataType type, Object value, String lang) throws UnsupportedIndexTypeException,
UnsupportedValueException {
if (type == null) {
throw new IllegalArgumentException("The parsed IndexDataType MUST NOT be null");
}
if (!type.equals(INDEX_TYPE)) {
throw new UnsupportedIndexTypeException(this, type);
}
if (value == null) {
return null;
}
if (value instanceof Date) {
return (Date) value;
} else if (value instanceof Calendar) {
return ((Calendar) value).getTime();
} else {
DateTime date;
try {
// NOTE: Solr only support UTC ... so we need to change the Timezone
date = XML_DATE_TIME_FORMAT.parseDateTime(value.toString());
} catch (IllegalArgumentException e) {
try {
date = XML_DATE_TIME_FORMAT_noMillis.parseDateTime(value.toString());
} catch (IllegalArgumentException e1) {
log.warn(
"Unable to parse Date/Time for Value "
+ value.toString()
+ " (use ISO date format (milliseconds optional))! -> no Date Mapping added!",
e1);
throw new UnsupportedValueException(this, type, value, e);
}
}
return date.toDate();
}
}
}
public static class BooleanConverter implements TypeConverter<Boolean> {
public static final IndexDataType INDEX_TYPE = IndexDataTypeEnum.BOOLEAN.getIndexType();
@Override
public IndexValue createIndexValue(Boolean value) {
if (value == null) {
return null;
}
return new IndexValue(value.toString(), INDEX_TYPE);
}
@Override
public Boolean createObject(IndexValue value) {
if (value == null) {
return null;
}
return createObject(value.getType(), value.getValue(), value.getLanguage());
}
@Override
public Class<Boolean> getJavaType() {
return Boolean.class;
}
@Override
public IndexDataType getIndexType() {
return INDEX_TYPE;
}
@Override
public Boolean createObject(IndexDataType type, Object value, String lang) throws UnsupportedIndexTypeException,
UnsupportedValueException {
if (type == null) {
throw new IllegalArgumentException("The parsed IndexDataType MUST NOT be null");
}
if (!type.equals(INDEX_TYPE)) {
throw new UnsupportedIndexTypeException(this, type);
}
if (value == null) {
return null;
}
if (value instanceof Boolean) {
return (Boolean) value;
} else {
return Boolean.valueOf(value.toString());
}
}
}
    /** Converts between plain {@link String} values and STR index values. */
    public static class StringConverter implements TypeConverter<String> {
        public static final IndexDataType INDEX_TYPE = IndexDataTypeEnum.STR.getIndexType();
        // If true, createObject(..) accepts any IndexDataType.
        // NOTE(review): this flag is currently not enforced — the type check in
        // createObject(IndexValue) is commented out below; confirm intended behavior
        // before relying on setAcceptAllIndexTypes(false).
        private boolean acceptAllIndexTypes;
        public final boolean isAcceptAllIndexTypes() {
            return acceptAllIndexTypes;
        }
        public final void setAcceptAllIndexTypes(boolean acceptAllIndexTypes) {
            this.acceptAllIndexTypes = acceptAllIndexTypes;
        }
        // default: accept all index types
        public StringConverter() {
            this(true);
        }
        public StringConverter(boolean acceptAllIndexTypes) {
            this.acceptAllIndexTypes = acceptAllIndexTypes;
        }
        @Override
        public IndexValue createIndexValue(String value) {
            // empty strings are not indexed
            if (value == null || value.isEmpty()) {
                return null;
            }
            return new IndexValue(value, INDEX_TYPE);
        }
        @Override
        public String createObject(IndexValue value) {
            if (value == null) {
                return null;
            }
            // for now accept any IndexValue regardless of type
            // if(!value.getType().equals(INDEX_TYPE)){
            // new UnsupportedIndexTypeException(this, value);
            // }
            return value.getValue();
        }
        @Override
        public Class<String> getJavaType() {
            return String.class;
        }
        @Override
        public IndexDataType getIndexType() {
            return INDEX_TYPE;
        }
        @Override
        public String createObject(IndexDataType type, Object value, String lang) throws NullPointerException {
            if (type == null) {
                throw new IllegalArgumentException("The parsed IndexDataType MUST NOT be null");
            }
            // any index type accepted; the string form of the value is returned
            return value != null ? value.toString() : null;
        }
    }
    /**
     * Converts between {@link Integer} values and INT index values. Optionally also
     * accepts LONG index values if they fit the Integer range.
     */
    public static class IntegerConverter implements TypeConverter<Integer> {
        public static final IndexDataType INDEX_TYPE = IndexDataTypeEnum.INT.getIndexType();
        // If true, LONG index values within the Integer range are also converted.
        private boolean acceptLong;
        public final boolean isAcceptLong() {
            return acceptLong;
        }
        public final void setAcceptLong(boolean acceptLong) {
            this.acceptLong = acceptLong;
        }
        // default: accept LONG index values
        public IntegerConverter() {
            this(true);
        }
        public IntegerConverter(boolean acceptLongIndexType) {
            this.acceptLong = acceptLongIndexType;
        }
        @Override
        public IndexValue createIndexValue(Integer value) {
            if (value == null) {
                return null;
            }
            return new IndexValue(value.toString(), INDEX_TYPE);
        }
        @Override
        public Integer createObject(IndexValue value) {
            if (value == null) {
                return null;
            }
            return createObject(value.getType(), value.getValue(), value.getLanguage());
        }
        @Override
        public Class<Integer> getJavaType() {
            return Integer.class;
        }
        @Override
        public IndexDataType getIndexType() {
            return INDEX_TYPE;
        }
        @Override
        public Integer createObject(IndexDataType type, Object value, String lang) throws UnsupportedIndexTypeException,
                                                                                  UnsupportedValueException,
                                                                                  NullPointerException {
            if (type == null) {
                throw new IllegalArgumentException("The parsed IndexDataType MUST NOT be null");
            }
            if (type.equals(INDEX_TYPE)) {
                if (value == null) { // move in here to ensure returning UnsupportedIndexTypeException on
                    // wrong types
                    return null;
                }
                if (value instanceof Integer) {
                    return (Integer) value;
                } else {
                    try {
                        return Integer.valueOf(value.toString());
                    } catch (NumberFormatException e) {
                        throw new UnsupportedValueException(this, type, value, e);
                    }
                }
            } else if (acceptLong && type.equals(IndexDataTypeEnum.LONG.getIndexType())) {
                if (value == null) { // move in here to ensure returning UnsupportedIndexTypeException on
                    // wrong types
                    return null;
                }
                long longValue;
                if (value instanceof Long) {
                    longValue = ((Long) value).longValue();
                } else {
                    try {
                        longValue = Long.parseLong(value.toString());
                    } catch (NumberFormatException e) {
                        throw new UnsupportedValueException(this, type, value, e);
                    }
                }
                // narrow only when the long fits the Integer range
                if (Integer.MAX_VALUE >= longValue && Integer.MIN_VALUE <= longValue) {
                    return Integer.valueOf((int) longValue);
                } else {
                    // parsed long value outside of the int range
                    throw new UnsupportedValueException(
                        this,
                        type,
                        value,
                        new IllegalStateException(
                            "Unable to convert LONG Value to Integer, because the value is outside of the Integer Range!"));
                }
            } else {
                throw new UnsupportedIndexTypeException(this, type);
            }
        }
    }
public static class LongConverter implements TypeConverter<Long> {
public static final IndexDataType LONG_TYPE = IndexDataTypeEnum.LONG.getIndexType();
private static final IndexDataType INT_TYPE = IndexDataTypeEnum.INT.getIndexType();
@Override
public IndexValue createIndexValue(Long value) {
if (value == null) {
return null;
}
return new IndexValue(value.toString(), LONG_TYPE);
}
@Override
public Long createObject(IndexValue value) {
if (value == null) {
return null;
}
return createObject(value.getType(), value.getValue(), value.getLanguage());
}
@Override
public Class<Long> getJavaType() {
return Long.class;
}
@Override
public IndexDataType getIndexType() {
return LONG_TYPE;
}
@Override
public Long createObject(IndexDataType type, Object value, String lang) throws UnsupportedIndexTypeException,
UnsupportedValueException,
NullPointerException {
if (type == null) {
throw new IllegalArgumentException("The parsed IndexDataType MUST NOT be null");
}
if (type.equals(LONG_TYPE) || type.equals(INT_TYPE)) {
if (value == null) {
return null;
}
if (value instanceof Long) {
return (Long) value;
} else if (value instanceof Integer) {
return ((Integer) value).longValue();
} else {
try {
return new Long(value.toString());
} catch (NumberFormatException e) {
throw new UnsupportedValueException(this, type, value, e);
}
}
} else {
throw new UnsupportedIndexTypeException(this, type);
}
}
}
public static class DoubleConverter implements TypeConverter<Double> {
public static final IndexDataType INDEX_TYPE = IndexDataTypeEnum.DOUBLE.getIndexType();
private static final Set<IndexDataType> SUPPORTED = new HashSet<IndexDataType>(Arrays.asList(
IndexDataTypeEnum.FLOAT.getIndexType(), IndexDataTypeEnum.INT.getIndexType(),
IndexDataTypeEnum.LONG.getIndexType(), INDEX_TYPE));
@Override
public IndexValue createIndexValue(Double value) {
if (value == null) {
return null;
}
return new IndexValue(value.toString(), INDEX_TYPE);
}
@Override
public Double createObject(IndexValue value) {
if (value == null) {
return null;
}
return createObject(value.getType(), value.getValue(), value.getLanguage());
}
@Override
public Class<Double> getJavaType() {
return Double.class;
}
@Override
public IndexDataType getIndexType() {
return INDEX_TYPE;
}
@Override
public Double createObject(IndexDataType type, Object value, String lang) throws UnsupportedIndexTypeException,
UnsupportedValueException,
NullPointerException {
if (type == null) {
throw new IllegalArgumentException("The parsed IndexDataType MUST NOT be null");
}
if (SUPPORTED.contains(type)) {
if (value == null) {
return null;
}
if (value instanceof Double) {
return (Double) value;
} else if (value instanceof Float) {
return ((Float) value).doubleValue();
} else {
try {
return new Double(value.toString());
} catch (NumberFormatException e) {
throw new UnsupportedValueException(this, type, value, e);
}
}
} else {
throw new UnsupportedIndexTypeException(this, type);
}
}
}
public static class FloatConverter implements TypeConverter<Float> {
public static final IndexDataType INDEX_TYPE = IndexDataTypeEnum.FLOAT.getIndexType();
private static final Collection<IndexDataType> DOUBLE_LONG_TYPES = Arrays.asList(
IndexDataTypeEnum.LONG.getIndexType(), IndexDataTypeEnum.DOUBLE.getIndexType());
private final Set<IndexDataType> supported = Collections
.synchronizedSet(new HashSet<IndexDataType>());
public FloatConverter() {
this(true);
}
public FloatConverter(boolean acceptDoubleAndLongIndexType) {
supported.addAll(Arrays.asList(IndexDataTypeEnum.INT.getIndexType(), INDEX_TYPE));
setAcceptDoubleAndLongIndexTypes(acceptDoubleAndLongIndexType);
}
public boolean isAcceptDoubleAndLongIndexTypes() {
return supported.containsAll(DOUBLE_LONG_TYPES);
}
public final void setAcceptDoubleAndLongIndexTypes(boolean state) {
if (state) {
supported.addAll(DOUBLE_LONG_TYPES);
} else {
supported.removeAll(DOUBLE_LONG_TYPES);
}
}
@Override
public IndexValue createIndexValue(Float value) {
if (value == null) {
return null;
}
return new IndexValue(value.toString(), INDEX_TYPE);
}
@Override
public Float createObject(IndexValue value) {
if (value == null) {
return null;
}
return createObject(value.getType(), value.getValue(), value.getLanguage());
}
@Override
public Class<Float> getJavaType() {
return Float.class;
}
@Override
public IndexDataType getIndexType() {
return INDEX_TYPE;
}
@Override
public Float createObject(IndexDataType type, Object value, String lang) throws UnsupportedIndexTypeException,
UnsupportedValueException,
NullPointerException {
if (type == null) {
throw new IllegalArgumentException("The parsed IndexDataType MUST NOT be null");
}
if (supported.contains(type)) {
if (value == null) {
return null;
}
if (value instanceof Float) {
return (Float) value;
} else if (value instanceof Double) {
return ((Double) value).floatValue();
} else {
try {
return new Float(value.toString());
} catch (NumberFormatException e) {
throw new UnsupportedValueException(this, type, value, e);
}
}
} else {
throw new UnsupportedIndexTypeException(this, type);
}
}
}
public static class BigIntegerConverter implements TypeConverter<BigInteger> {
public static final IndexDataType INDEX_TYPE = IndexDataTypeEnum.LONG.getIndexType();
private static final IndexDataType INT_TYPE = IndexDataTypeEnum.INT.getIndexType();
@Override
public IndexValue createIndexValue(BigInteger value) {
if (value == null) {
return null;
}
return new IndexValue(value.toString(), INDEX_TYPE);
}
@Override
public BigInteger createObject(IndexValue value) {
if (value == null) {
return null;
}
return createObject(value.getType(), value.getValue(), value.getLanguage());
}
@Override
public Class<BigInteger> getJavaType() {
return BigInteger.class;
}
@Override
public IndexDataType getIndexType() {
return INDEX_TYPE;
}
@Override
public BigInteger createObject(IndexDataType type, Object value, String lang) throws UnsupportedIndexTypeException,
UnsupportedValueException,
NullPointerException {
if (type == null) {
throw new IllegalArgumentException("The parsed IndexDataType MUST NOT be null");
}
if (type.equals(INDEX_TYPE) || type.equals(INT_TYPE)) {
if (value == null) {
return null;
}
try {
return new BigInteger(value.toString());
} catch (NumberFormatException e) {
throw new UnsupportedValueException(this, type, value, e);
}
} else {
throw new UnsupportedIndexTypeException(this, type);
}
}
}
public static class BigDecimalConverter implements TypeConverter<BigDecimal> {
public static final IndexDataType INDEX_TYPE = IndexDataTypeEnum.DOUBLE.getIndexType();
private static final Set<IndexDataType> SUPPORTED = new HashSet<IndexDataType>(Arrays.asList(
IndexDataTypeEnum.FLOAT.getIndexType(), IndexDataTypeEnum.INT.getIndexType(),
IndexDataTypeEnum.LONG.getIndexType(), INDEX_TYPE));
@Override
public IndexValue createIndexValue(BigDecimal value) {
if (value == null) {
return null;
}
return new IndexValue(value.toString(), INDEX_TYPE);
}
@Override
public BigDecimal createObject(IndexValue value) {
return createObject(value.getType(), value.getValue(), value.getLanguage());
}
@Override
public BigDecimal createObject(IndexDataType type, Object value, String lang) throws UnsupportedIndexTypeException,
UnsupportedValueException,
NullPointerException {
if (type == null) {
throw new IllegalArgumentException("The parsed IndexDataType MUST NOT be null");
}
if (SUPPORTED.contains(type)) {
if (value == null) {
return null;
}
try {
return new BigDecimal(value.toString());
} catch (NumberFormatException e) {
throw new UnsupportedValueException(this, type, value, e);
}
} else {
throw new UnsupportedIndexTypeException(type);
}
}
@Override
public Class<BigDecimal> getJavaType() {
return BigDecimal.class;
}
@Override
public IndexDataType getIndexType() {
return INDEX_TYPE;
}
}
/**
 * {@link TypeConverter} that maps {@link Text} objects to index values of the
 * {@code TXT} index data type. When converting back to objects it also accepts
 * values stored under the {@code STR} index data type.
 */
public static class TextConverter implements TypeConverter<Text> {

    /** Primary index data type written by this converter. */
    public static final IndexDataType INDEX_TYPE = IndexDataTypeEnum.TXT.getIndexType();

    /** Also accepted on read, so plain strings can be revived as Text. */
    private static final IndexDataType STRING_TYPE = IndexDataTypeEnum.STR.getIndexType();

    private final ValueFactory valueFactory;

    /**
     * Creates a converter backed by the given factory.
     *
     * @param valueFactory factory used to create {@link Text} instances; must not be null
     */
    public TextConverter(ValueFactory valueFactory) {
        if (valueFactory == null) {
            throw new IllegalArgumentException("Parameter ValueFactory MUST NOT be NULL!");
        }
        this.valueFactory = valueFactory;
    }

    @Override
    public IndexValue createIndexValue(Text value) {
        // Null maps to null; otherwise keep the text and its language tag.
        return value == null
                ? null
                : new IndexValue(value.getText(), INDEX_TYPE, value.getLanguage());
    }

    @Override
    public Text createObject(IndexValue value) throws UnsupportedIndexTypeException {
        // Delegate to the type/value/language overload after the null guard.
        return value == null
                ? null
                : createObject(value.getType(), value.getValue(), value.getLanguage());
    }

    @Override
    public Class<Text> getJavaType() {
        return Text.class;
    }

    @Override
    public IndexDataType getIndexType() {
        return INDEX_TYPE;
    }

    @Override
    public Text createObject(IndexDataType type, Object value, String lang) throws UnsupportedIndexTypeException,
            UnsupportedValueException,
            NullPointerException {
        if (type == null) {
            throw new IllegalArgumentException("The parsed IndexDataType MUST NOT be null");
        }
        // Both TXT and STR values can be revived as Text objects.
        if (!(type.equals(INDEX_TYPE) || type.equals(STRING_TYPE))) {
            throw new UnsupportedIndexTypeException(this, type);
        }
        return value == null ? null : valueFactory.createText(value.toString(), lang);
    }
}
/**
 * {@link TypeConverter} that maps {@link Reference} objects to index values of
 * the {@code REF} index data type. Empty or null reference strings are treated
 * as absent and produce no index value.
 */
public static class ReferenceConverter implements TypeConverter<Reference> {

    /** Index data type written and accepted by this converter. */
    public static final IndexDataType INDEX_TYPE = IndexDataTypeEnum.REF.getIndexType();

    private final ValueFactory valueFactory;

    /**
     * Creates a converter backed by the given factory.
     *
     * @param valueFactory factory used to create {@link Reference} instances; must not be null
     */
    public ReferenceConverter(ValueFactory valueFactory) {
        if (valueFactory == null) {
            throw new IllegalArgumentException("Parameter ValueFactory MUST NOT be NULL!");
        }
        this.valueFactory = valueFactory;
    }

    @Override
    public IndexValue createIndexValue(Reference value) {
        // Treat a missing or empty reference string as "nothing to index".
        if (value == null || value.getReference() == null || value.getReference().isEmpty()) {
            return null;
        }
        return new IndexValue(value.getReference(), INDEX_TYPE);
    }

    @Override
    public Reference createObject(IndexValue value) throws UnsupportedIndexTypeException {
        // Delegate to the type/value/language overload after the null guard.
        return value == null
                ? null
                : createObject(value.getType(), value.getValue(), value.getLanguage());
    }

    @Override
    public Class<Reference> getJavaType() {
        return Reference.class;
    }

    @Override
    public IndexDataType getIndexType() {
        return INDEX_TYPE;
    }

    @Override
    public Reference createObject(IndexDataType type, Object value, String lang) throws UnsupportedIndexTypeException,
            UnsupportedValueException,
            NullPointerException {
        if (type == null) {
            throw new IllegalArgumentException("The parsed IndexDataType MUST NOT be null");
        }
        // Only REF values can be revived as Reference objects.
        if (!type.equals(INDEX_TYPE)) {
            throw new UnsupportedIndexTypeException(this, type);
        }
        // The language parameter is irrelevant for references and is ignored.
        return value == null ? null : valueFactory.createReference(value.toString());
    }
}
}
|
googleapis/google-cloud-java | 37,990 | java-asset/proto-google-cloud-asset-v1/src/main/java/com/google/cloud/asset/v1/Tag.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/asset/v1/assets.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.asset.v1;
/**
*
*
* <pre>
* The key and value for a
* [tag](https://cloud.google.com/resource-manager/docs/tags/tags-overview).
* </pre>
*
* Protobuf type {@code google.cloud.asset.v1.Tag}
*/
public final class Tag extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.asset.v1.Tag)
TagOrBuilder {
private static final long serialVersionUID = 0L;
// Use Tag.newBuilder() to construct.
private Tag(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Tag() {
tagKey_ = "";
tagKeyId_ = "";
tagValue_ = "";
tagValueId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Tag();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.asset.v1.AssetProto
.internal_static_google_cloud_asset_v1_Tag_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.asset.v1.AssetProto
.internal_static_google_cloud_asset_v1_Tag_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.asset.v1.Tag.class, com.google.cloud.asset.v1.Tag.Builder.class);
}
private int bitField0_;
public static final int TAG_KEY_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object tagKey_ = "";
/**
*
*
* <pre>
* TagKey namespaced name, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_key = 1;</code>
*
* @return Whether the tagKey field is set.
*/
@java.lang.Override
public boolean hasTagKey() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* TagKey namespaced name, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_key = 1;</code>
*
* @return The tagKey.
*/
@java.lang.Override
public java.lang.String getTagKey() {
java.lang.Object ref = tagKey_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tagKey_ = s;
return s;
}
}
/**
*
*
* <pre>
* TagKey namespaced name, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_key = 1;</code>
*
* @return The bytes for tagKey.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTagKeyBytes() {
java.lang.Object ref = tagKey_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
tagKey_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TAG_KEY_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object tagKeyId_ = "";
/**
*
*
* <pre>
* TagKey ID, in the format of tagKeys/{TAG_KEY_ID}.
* </pre>
*
* <code>optional string tag_key_id = 2;</code>
*
* @return Whether the tagKeyId field is set.
*/
@java.lang.Override
public boolean hasTagKeyId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* TagKey ID, in the format of tagKeys/{TAG_KEY_ID}.
* </pre>
*
* <code>optional string tag_key_id = 2;</code>
*
* @return The tagKeyId.
*/
@java.lang.Override
public java.lang.String getTagKeyId() {
java.lang.Object ref = tagKeyId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tagKeyId_ = s;
return s;
}
}
/**
*
*
* <pre>
* TagKey ID, in the format of tagKeys/{TAG_KEY_ID}.
* </pre>
*
* <code>optional string tag_key_id = 2;</code>
*
* @return The bytes for tagKeyId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTagKeyIdBytes() {
java.lang.Object ref = tagKeyId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
tagKeyId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TAG_VALUE_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object tagValue_ = "";
/**
*
*
* <pre>
* TagValue namespaced name, in the format of
* {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_value = 3;</code>
*
* @return Whether the tagValue field is set.
*/
@java.lang.Override
public boolean hasTagValue() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* TagValue namespaced name, in the format of
* {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_value = 3;</code>
*
* @return The tagValue.
*/
@java.lang.Override
public java.lang.String getTagValue() {
java.lang.Object ref = tagValue_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tagValue_ = s;
return s;
}
}
/**
*
*
* <pre>
* TagValue namespaced name, in the format of
* {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_value = 3;</code>
*
* @return The bytes for tagValue.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTagValueBytes() {
java.lang.Object ref = tagValue_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
tagValue_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TAG_VALUE_ID_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object tagValueId_ = "";
/**
*
*
* <pre>
* TagValue ID, in the format of tagValues/{TAG_VALUE_ID}.
* </pre>
*
* <code>optional string tag_value_id = 4;</code>
*
* @return Whether the tagValueId field is set.
*/
@java.lang.Override
public boolean hasTagValueId() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
*
*
* <pre>
* TagValue ID, in the format of tagValues/{TAG_VALUE_ID}.
* </pre>
*
* <code>optional string tag_value_id = 4;</code>
*
* @return The tagValueId.
*/
@java.lang.Override
public java.lang.String getTagValueId() {
java.lang.Object ref = tagValueId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tagValueId_ = s;
return s;
}
}
/**
*
*
* <pre>
* TagValue ID, in the format of tagValues/{TAG_VALUE_ID}.
* </pre>
*
* <code>optional string tag_value_id = 4;</code>
*
* @return The bytes for tagValueId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTagValueIdBytes() {
java.lang.Object ref = tagValueId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
tagValueId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, tagKey_);
}
if (((bitField0_ & 0x00000002) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, tagKeyId_);
}
if (((bitField0_ & 0x00000004) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, tagValue_);
}
if (((bitField0_ & 0x00000008) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, tagValueId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, tagKey_);
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, tagKeyId_);
}
if (((bitField0_ & 0x00000004) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, tagValue_);
}
if (((bitField0_ & 0x00000008) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, tagValueId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.asset.v1.Tag)) {
return super.equals(obj);
}
com.google.cloud.asset.v1.Tag other = (com.google.cloud.asset.v1.Tag) obj;
if (hasTagKey() != other.hasTagKey()) return false;
if (hasTagKey()) {
if (!getTagKey().equals(other.getTagKey())) return false;
}
if (hasTagKeyId() != other.hasTagKeyId()) return false;
if (hasTagKeyId()) {
if (!getTagKeyId().equals(other.getTagKeyId())) return false;
}
if (hasTagValue() != other.hasTagValue()) return false;
if (hasTagValue()) {
if (!getTagValue().equals(other.getTagValue())) return false;
}
if (hasTagValueId() != other.hasTagValueId()) return false;
if (hasTagValueId()) {
if (!getTagValueId().equals(other.getTagValueId())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTagKey()) {
hash = (37 * hash) + TAG_KEY_FIELD_NUMBER;
hash = (53 * hash) + getTagKey().hashCode();
}
if (hasTagKeyId()) {
hash = (37 * hash) + TAG_KEY_ID_FIELD_NUMBER;
hash = (53 * hash) + getTagKeyId().hashCode();
}
if (hasTagValue()) {
hash = (37 * hash) + TAG_VALUE_FIELD_NUMBER;
hash = (53 * hash) + getTagValue().hashCode();
}
if (hasTagValueId()) {
hash = (37 * hash) + TAG_VALUE_ID_FIELD_NUMBER;
hash = (53 * hash) + getTagValueId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.asset.v1.Tag parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.asset.v1.Tag parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.asset.v1.Tag parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.asset.v1.Tag parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.asset.v1.Tag parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.asset.v1.Tag parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.asset.v1.Tag parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.asset.v1.Tag parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.asset.v1.Tag parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.asset.v1.Tag parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.asset.v1.Tag parseFrom(com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.asset.v1.Tag parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.asset.v1.Tag prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The key and value for a
* [tag](https://cloud.google.com/resource-manager/docs/tags/tags-overview).
* </pre>
*
* Protobuf type {@code google.cloud.asset.v1.Tag}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.asset.v1.Tag)
com.google.cloud.asset.v1.TagOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.asset.v1.AssetProto
.internal_static_google_cloud_asset_v1_Tag_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.asset.v1.AssetProto
.internal_static_google_cloud_asset_v1_Tag_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.asset.v1.Tag.class, com.google.cloud.asset.v1.Tag.Builder.class);
}
// Construct using com.google.cloud.asset.v1.Tag.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
tagKey_ = "";
tagKeyId_ = "";
tagValue_ = "";
tagValueId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.asset.v1.AssetProto
.internal_static_google_cloud_asset_v1_Tag_descriptor;
}
@java.lang.Override
public com.google.cloud.asset.v1.Tag getDefaultInstanceForType() {
return com.google.cloud.asset.v1.Tag.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.asset.v1.Tag build() {
com.google.cloud.asset.v1.Tag result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.asset.v1.Tag buildPartial() {
com.google.cloud.asset.v1.Tag result = new com.google.cloud.asset.v1.Tag(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.asset.v1.Tag result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.tagKey_ = tagKey_;
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.tagKeyId_ = tagKeyId_;
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.tagValue_ = tagValue_;
to_bitField0_ |= 0x00000004;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.tagValueId_ = tagValueId_;
to_bitField0_ |= 0x00000008;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.asset.v1.Tag) {
return mergeFrom((com.google.cloud.asset.v1.Tag) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.asset.v1.Tag other) {
if (other == com.google.cloud.asset.v1.Tag.getDefaultInstance()) return this;
if (other.hasTagKey()) {
tagKey_ = other.tagKey_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasTagKeyId()) {
tagKeyId_ = other.tagKeyId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasTagValue()) {
tagValue_ = other.tagValue_;
bitField0_ |= 0x00000004;
onChanged();
}
if (other.hasTagValueId()) {
tagValueId_ = other.tagValueId_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
tagKey_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
tagKeyId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
tagValue_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
tagValueId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object tagKey_ = "";
/**
*
*
* <pre>
* TagKey namespaced name, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_key = 1;</code>
*
* @return Whether the tagKey field is set.
*/
public boolean hasTagKey() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* TagKey namespaced name, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_key = 1;</code>
*
* @return The tagKey.
*/
public java.lang.String getTagKey() {
java.lang.Object ref = tagKey_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tagKey_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* TagKey namespaced name, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_key = 1;</code>
*
* @return The bytes for tagKey.
*/
public com.google.protobuf.ByteString getTagKeyBytes() {
java.lang.Object ref = tagKey_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
tagKey_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* TagKey namespaced name, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_key = 1;</code>
*
* @param value The tagKey to set.
* @return This builder for chaining.
*/
public Builder setTagKey(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
tagKey_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* TagKey namespaced name, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_key = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearTagKey() {
tagKey_ = getDefaultInstance().getTagKey();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* TagKey namespaced name, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_key = 1;</code>
*
* @param value The bytes for tagKey to set.
* @return This builder for chaining.
*/
public Builder setTagKeyBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
tagKey_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object tagKeyId_ = "";
/**
*
*
* <pre>
* TagKey ID, in the format of tagKeys/{TAG_KEY_ID}.
* </pre>
*
* <code>optional string tag_key_id = 2;</code>
*
* @return Whether the tagKeyId field is set.
*/
public boolean hasTagKeyId() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* TagKey ID, in the format of tagKeys/{TAG_KEY_ID}.
* </pre>
*
* <code>optional string tag_key_id = 2;</code>
*
* @return The tagKeyId.
*/
public java.lang.String getTagKeyId() {
java.lang.Object ref = tagKeyId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tagKeyId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* TagKey ID, in the format of tagKeys/{TAG_KEY_ID}.
* </pre>
*
* <code>optional string tag_key_id = 2;</code>
*
* @return The bytes for tagKeyId.
*/
public com.google.protobuf.ByteString getTagKeyIdBytes() {
java.lang.Object ref = tagKeyId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
tagKeyId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* TagKey ID, in the format of tagKeys/{TAG_KEY_ID}.
* </pre>
*
* <code>optional string tag_key_id = 2;</code>
*
* @param value The tagKeyId to set.
* @return This builder for chaining.
*/
public Builder setTagKeyId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
tagKeyId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* TagKey ID, in the format of tagKeys/{TAG_KEY_ID}.
* </pre>
*
* <code>optional string tag_key_id = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearTagKeyId() {
tagKeyId_ = getDefaultInstance().getTagKeyId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* TagKey ID, in the format of tagKeys/{TAG_KEY_ID}.
* </pre>
*
* <code>optional string tag_key_id = 2;</code>
*
* @param value The bytes for tagKeyId to set.
* @return This builder for chaining.
*/
public Builder setTagKeyIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
tagKeyId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object tagValue_ = "";
/**
*
*
* <pre>
* TagValue namespaced name, in the format of
* {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_value = 3;</code>
*
* @return Whether the tagValue field is set.
*/
public boolean hasTagValue() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* TagValue namespaced name, in the format of
* {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_value = 3;</code>
*
* @return The tagValue.
*/
public java.lang.String getTagValue() {
java.lang.Object ref = tagValue_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tagValue_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* TagValue namespaced name, in the format of
* {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_value = 3;</code>
*
* @return The bytes for tagValue.
*/
public com.google.protobuf.ByteString getTagValueBytes() {
java.lang.Object ref = tagValue_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
tagValue_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* TagValue namespaced name, in the format of
* {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_value = 3;</code>
*
* @param value The tagValue to set.
* @return This builder for chaining.
*/
public Builder setTagValue(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
tagValue_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* TagValue namespaced name, in the format of
* {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_value = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearTagValue() {
tagValue_ = getDefaultInstance().getTagValue();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* TagValue namespaced name, in the format of
* {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}.
* </pre>
*
* <code>optional string tag_value = 3;</code>
*
* @param value The bytes for tagValue to set.
* @return This builder for chaining.
*/
public Builder setTagValueBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
tagValue_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object tagValueId_ = "";
/**
*
*
* <pre>
* TagValue ID, in the format of tagValues/{TAG_VALUE_ID}.
* </pre>
*
* <code>optional string tag_value_id = 4;</code>
*
* @return Whether the tagValueId field is set.
*/
public boolean hasTagValueId() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
*
*
* <pre>
* TagValue ID, in the format of tagValues/{TAG_VALUE_ID}.
* </pre>
*
* <code>optional string tag_value_id = 4;</code>
*
* @return The tagValueId.
*/
public java.lang.String getTagValueId() {
java.lang.Object ref = tagValueId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
tagValueId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* TagValue ID, in the format of tagValues/{TAG_VALUE_ID}.
* </pre>
*
* <code>optional string tag_value_id = 4;</code>
*
* @return The bytes for tagValueId.
*/
public com.google.protobuf.ByteString getTagValueIdBytes() {
java.lang.Object ref = tagValueId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
tagValueId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* TagValue ID, in the format of tagValues/{TAG_VALUE_ID}.
* </pre>
*
* <code>optional string tag_value_id = 4;</code>
*
* @param value The tagValueId to set.
* @return This builder for chaining.
*/
public Builder setTagValueId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
tagValueId_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* TagValue ID, in the format of tagValues/{TAG_VALUE_ID}.
* </pre>
*
* <code>optional string tag_value_id = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearTagValueId() {
tagValueId_ = getDefaultInstance().getTagValueId();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* TagValue ID, in the format of tagValues/{TAG_VALUE_ID}.
* </pre>
*
* <code>optional string tag_value_id = 4;</code>
*
* @param value The bytes for tagValueId to set.
* @return This builder for chaining.
*/
public Builder setTagValueIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
tagValueId_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
    // Unknown-field handling is delegated unchanged to the protobuf base
    // builder; these overrides exist only to narrow the return type to Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.asset.v1.Tag)
}
// @@protoc_insertion_point(class_scope:google.cloud.asset.v1.Tag)
  // Singleton default (all-fields-unset) instance, created eagerly at class
  // load time and shared by every caller.
  private static final com.google.cloud.asset.v1.Tag DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.asset.v1.Tag();
  }
  public static com.google.cloud.asset.v1.Tag getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. Parses by delegating to Builder.mergeFrom and, on any
  // failure, attaches the partially-built message to the thrown
  // InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<Tag> PARSER =
      new com.google.protobuf.AbstractParser<Tag>() {
        @java.lang.Override
        public Tag parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors in the protobuf exception type expected by
            // callers of parsePartialFrom.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<Tag> parser() {
    return PARSER;
  }
  // Instance-level accessors required by the Message interface; both return
  // the shared static singletons above.
  @java.lang.Override
  public com.google.protobuf.Parser<Tag> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.asset.v1.Tag getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---- Concatenation artifact: a second generated source file begins below. ----
// File: java-redis-cluster/proto-google-cloud-redis-cluster-v1/src/main/java/com/google/cloud/redis/cluster/v1/ConnectionDetail.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/redis/cluster/v1/cloud_redis_cluster.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.redis.cluster.v1;
/**
*
*
* <pre>
* Detailed information of each PSC connection.
* </pre>
*
* Protobuf type {@code google.cloud.redis.cluster.v1.ConnectionDetail}
*/
public final class ConnectionDetail extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.redis.cluster.v1.ConnectionDetail)
ConnectionDetailOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ConnectionDetail.newBuilder() to construct.
  private ConnectionDetail(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for the default instance and by newInstance().
  private ConnectionDetail() {}
  // Invoked reflectively by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ConnectionDetail();
  }
  // Descriptor and field-accessor plumbing, looked up from the generated
  // outer proto class; used by the runtime for reflection-based access.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.redis.cluster.v1.CloudRedisClusterProto
        .internal_static_google_cloud_redis_cluster_v1_ConnectionDetail_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.redis.cluster.v1.CloudRedisClusterProto
        .internal_static_google_cloud_redis_cluster_v1_ConnectionDetail_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.redis.cluster.v1.ConnectionDetail.class,
            com.google.cloud.redis.cluster.v1.ConnectionDetail.Builder.class);
  }
  // Oneof state: connectionCase_ holds the field number of the set member
  // (0 = none), and connection_ holds that member's value.
  private int connectionCase_ = 0;
  @SuppressWarnings("serial")
  private java.lang.Object connection_;
  // Typed view of the oneof discriminant; enum numbers match proto field
  // numbers.
  public enum ConnectionCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    PSC_AUTO_CONNECTION(1),
    PSC_CONNECTION(2),
    CONNECTION_NOT_SET(0);
    private final int value;
    private ConnectionCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ConnectionCase valueOf(int value) {
      return forNumber(value);
    }
    // Returns null (not an exception) for unrecognized numbers.
    public static ConnectionCase forNumber(int value) {
      switch (value) {
        case 1:
          return PSC_AUTO_CONNECTION;
        case 2:
          return PSC_CONNECTION;
        case 0:
          return CONNECTION_NOT_SET;
        default:
          return null;
      }
    }
    public int getNumber() {
      return this.value;
    }
  };
  public ConnectionCase getConnectionCase() {
    return ConnectionCase.forNumber(connectionCase_);
  }
  public static final int PSC_AUTO_CONNECTION_FIELD_NUMBER = 1;
  /**
   *
   *
   * <pre>
   * Detailed information of a PSC connection that is created through
   * service connectivity automation.
   * </pre>
   *
   * <code>.google.cloud.redis.cluster.v1.PscAutoConnection psc_auto_connection = 1;</code>
   *
   * @return Whether the pscAutoConnection field is set.
   */
  @java.lang.Override
  public boolean hasPscAutoConnection() {
    return connectionCase_ == 1;
  }
  /**
   *
   *
   * <pre>
   * Detailed information of a PSC connection that is created through
   * service connectivity automation.
   * </pre>
   *
   * <code>.google.cloud.redis.cluster.v1.PscAutoConnection psc_auto_connection = 1;</code>
   *
   * @return The pscAutoConnection.
   */
  @java.lang.Override
  public com.google.cloud.redis.cluster.v1.PscAutoConnection getPscAutoConnection() {
    // Returns the default instance (never null) when this oneof member is not
    // the one currently set.
    if (connectionCase_ == 1) {
      return (com.google.cloud.redis.cluster.v1.PscAutoConnection) connection_;
    }
    return com.google.cloud.redis.cluster.v1.PscAutoConnection.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Detailed information of a PSC connection that is created through
   * service connectivity automation.
   * </pre>
   *
   * <code>.google.cloud.redis.cluster.v1.PscAutoConnection psc_auto_connection = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.redis.cluster.v1.PscAutoConnectionOrBuilder
      getPscAutoConnectionOrBuilder() {
    if (connectionCase_ == 1) {
      return (com.google.cloud.redis.cluster.v1.PscAutoConnection) connection_;
    }
    return com.google.cloud.redis.cluster.v1.PscAutoConnection.getDefaultInstance();
  }
  public static final int PSC_CONNECTION_FIELD_NUMBER = 2;
  /**
   *
   *
   * <pre>
   * Detailed information of a PSC connection that is created by the customer
   * who owns the cluster.
   * </pre>
   *
   * <code>.google.cloud.redis.cluster.v1.PscConnection psc_connection = 2;</code>
   *
   * @return Whether the pscConnection field is set.
   */
  @java.lang.Override
  public boolean hasPscConnection() {
    return connectionCase_ == 2;
  }
  /**
   *
   *
   * <pre>
   * Detailed information of a PSC connection that is created by the customer
   * who owns the cluster.
   * </pre>
   *
   * <code>.google.cloud.redis.cluster.v1.PscConnection psc_connection = 2;</code>
   *
   * @return The pscConnection.
   */
  @java.lang.Override
  public com.google.cloud.redis.cluster.v1.PscConnection getPscConnection() {
    // Returns the default instance (never null) when this oneof member is not
    // the one currently set.
    if (connectionCase_ == 2) {
      return (com.google.cloud.redis.cluster.v1.PscConnection) connection_;
    }
    return com.google.cloud.redis.cluster.v1.PscConnection.getDefaultInstance();
  }
  /**
   *
   *
   * <pre>
   * Detailed information of a PSC connection that is created by the customer
   * who owns the cluster.
   * </pre>
   *
   * <code>.google.cloud.redis.cluster.v1.PscConnection psc_connection = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.redis.cluster.v1.PscConnectionOrBuilder getPscConnectionOrBuilder() {
    if (connectionCase_ == 2) {
      return (com.google.cloud.redis.cluster.v1.PscConnection) connection_;
    }
    return com.google.cloud.redis.cluster.v1.PscConnection.getDefaultInstance();
  }
  // Tri-state cache for isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes whichever oneof member is set (field 1 or 2), then any unknown
  // fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (connectionCase_ == 1) {
      output.writeMessage(1, (com.google.cloud.redis.cluster.v1.PscAutoConnection) connection_);
    }
    if (connectionCase_ == 2) {
      output.writeMessage(2, (com.google.cloud.redis.cluster.v1.PscConnection) connection_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize (-1 when unset) caches the result; safe because the message
    // is immutable once built.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (connectionCase_ == 1) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, (com.google.cloud.redis.cluster.v1.PscAutoConnection) connection_);
    }
    if (connectionCase_ == 2) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, (com.google.cloud.redis.cluster.v1.PscConnection) connection_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: same oneof case, equal member value, and equal unknown
  // fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.redis.cluster.v1.ConnectionDetail)) {
      return super.equals(obj);
    }
    com.google.cloud.redis.cluster.v1.ConnectionDetail other =
        (com.google.cloud.redis.cluster.v1.ConnectionDetail) obj;
    if (!getConnectionCase().equals(other.getConnectionCase())) return false;
    switch (connectionCase_) {
      case 1:
        if (!getPscAutoConnection().equals(other.getPscAutoConnection())) return false;
        break;
      case 2:
        if (!getPscConnection().equals(other.getPscConnection())) return false;
        break;
      case 0:
      default:
        // Oneof not set: nothing further to compare.
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Cached after first computation (0 doubles as the "unset" sentinel);
    // safe because the message is immutable once built.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Mix in the field number and value of whichever oneof member is set, so
    // the hash stays consistent with equals().
    switch (connectionCase_) {
      case 1:
        hash = (37 * hash) + PSC_AUTO_CONNECTION_FIELD_NUMBER;
        hash = (53 * hash) + getPscAutoConnection().hashCode();
        break;
      case 2:
        hash = (37 * hash) + PSC_CONNECTION_FIELD_NUMBER;
        hash = (53 * hash) + getPscConnection().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. The byte-array/ByteBuffer/ByteString
  // overloads go straight through PARSER; the stream overloads use the
  // GeneratedMessageV3 helpers, which translate protobuf errors into
  // InvalidProtocolBufferException while letting other IOExceptions propagate.
  public static com.google.cloud.redis.cluster.v1.ConnectionDetail parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.redis.cluster.v1.ConnectionDetail parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.redis.cluster.v1.ConnectionDetail parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.redis.cluster.v1.ConnectionDetail parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.redis.cluster.v1.ConnectionDetail parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.redis.cluster.v1.ConnectionDetail parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.redis.cluster.v1.ConnectionDetail parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.redis.cluster.v1.ConnectionDetail parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message, as
  // written by writeDelimitedTo.
  public static com.google.cloud.redis.cluster.v1.ConnectionDetail parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.redis.cluster.v1.ConnectionDetail parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.redis.cluster.v1.ConnectionDetail parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.redis.cluster.v1.ConnectionDetail parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories. newBuilder() starts from the default instance;
  // newBuilder(prototype) pre-populates from an existing message.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.redis.cluster.v1.ConnectionDetail prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder without a redundant merge.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Detailed information of each PSC connection.
   * </pre>
   *
   * Protobuf type {@code google.cloud.redis.cluster.v1.ConnectionDetail}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.redis.cluster.v1.ConnectionDetail)
      com.google.cloud.redis.cluster.v1.ConnectionDetailOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.redis.cluster.v1.CloudRedisClusterProto
          .internal_static_google_cloud_redis_cluster_v1_ConnectionDetail_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.redis.cluster.v1.CloudRedisClusterProto
          .internal_static_google_cloud_redis_cluster_v1_ConnectionDetail_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.redis.cluster.v1.ConnectionDetail.class,
              com.google.cloud.redis.cluster.v1.ConnectionDetail.Builder.class);
    }
    // Construct using com.google.cloud.redis.cluster.v1.ConnectionDetail.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all local state: presence bits, both nested builders (if they
    // were ever created), and the oneof case/value.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (pscAutoConnectionBuilder_ != null) {
        pscAutoConnectionBuilder_.clear();
      }
      if (pscConnectionBuilder_ != null) {
        pscConnectionBuilder_.clear();
      }
      connectionCase_ = 0;
      connection_ = null;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.redis.cluster.v1.CloudRedisClusterProto
          .internal_static_google_cloud_redis_cluster_v1_ConnectionDetail_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.redis.cluster.v1.ConnectionDetail getDefaultInstanceForType() {
      return com.google.cloud.redis.cluster.v1.ConnectionDetail.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.redis.cluster.v1.ConnectionDetail build() {
      com.google.cloud.redis.cluster.v1.ConnectionDetail result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.redis.cluster.v1.ConnectionDetail buildPartial() {
      com.google.cloud.redis.cluster.v1.ConnectionDetail result =
          new com.google.cloud.redis.cluster.v1.ConnectionDetail(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }
    // No non-oneof fields on this message; generated placeholder.
    private void buildPartial0(com.google.cloud.redis.cluster.v1.ConnectionDetail result) {
      int from_bitField0_ = bitField0_;
    }
    // Copies the oneof into the result; a live nested builder (if one exists
    // for the active case) takes precedence over the raw stored value.
    private void buildPartialOneofs(com.google.cloud.redis.cluster.v1.ConnectionDetail result) {
      result.connectionCase_ = connectionCase_;
      result.connection_ = this.connection_;
      if (connectionCase_ == 1 && pscAutoConnectionBuilder_ != null) {
        result.connection_ = pscAutoConnectionBuilder_.build();
      }
      if (connectionCase_ == 2 && pscConnectionBuilder_ != null) {
        result.connection_ = pscConnectionBuilder_.build();
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.redis.cluster.v1.ConnectionDetail) {
        return mergeFrom((com.google.cloud.redis.cluster.v1.ConnectionDetail) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges the other message's oneof into this builder; merging an unset
    // message is a no-op.
    public Builder mergeFrom(com.google.cloud.redis.cluster.v1.ConnectionDetail other) {
      if (other == com.google.cloud.redis.cluster.v1.ConnectionDetail.getDefaultInstance())
        return this;
      switch (other.getConnectionCase()) {
        case PSC_AUTO_CONNECTION:
          {
            mergePscAutoConnection(other.getPscAutoConnection());
            break;
          }
        case PSC_CONNECTION:
          {
            mergePscConnection(other.getPscConnection());
            break;
          }
        case CONNECTION_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire parsing loop: dispatches on field tags (10 = field 1 message,
    // 18 = field 2 message); unrecognized tags go to the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(
                    getPscAutoConnectionFieldBuilder().getBuilder(), extensionRegistry);
                connectionCase_ = 1;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getPscConnectionFieldBuilder().getBuilder(), extensionRegistry);
                connectionCase_ = 2;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder-side oneof state, mirroring the message's fields.
    private int connectionCase_ = 0;
    private java.lang.Object connection_;
    public ConnectionCase getConnectionCase() {
      return ConnectionCase.forNumber(connectionCase_);
    }
    public Builder clearConnection() {
      connectionCase_ = 0;
      connection_ = null;
      onChanged();
      return this;
    }
    private int bitField0_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.redis.cluster.v1.PscAutoConnection,
            com.google.cloud.redis.cluster.v1.PscAutoConnection.Builder,
            com.google.cloud.redis.cluster.v1.PscAutoConnectionOrBuilder>
        pscAutoConnectionBuilder_;
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created through
     * service connectivity automation.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscAutoConnection psc_auto_connection = 1;</code>
     *
     * @return Whether the pscAutoConnection field is set.
     */
    @java.lang.Override
    public boolean hasPscAutoConnection() {
      return connectionCase_ == 1;
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created through
     * service connectivity automation.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscAutoConnection psc_auto_connection = 1;</code>
     *
     * @return The pscAutoConnection.
     */
    @java.lang.Override
    public com.google.cloud.redis.cluster.v1.PscAutoConnection getPscAutoConnection() {
      // Reads from the nested builder when one exists; otherwise from the raw
      // stored value. Returns the default instance when the case is not set.
      if (pscAutoConnectionBuilder_ == null) {
        if (connectionCase_ == 1) {
          return (com.google.cloud.redis.cluster.v1.PscAutoConnection) connection_;
        }
        return com.google.cloud.redis.cluster.v1.PscAutoConnection.getDefaultInstance();
      } else {
        if (connectionCase_ == 1) {
          return pscAutoConnectionBuilder_.getMessage();
        }
        return com.google.cloud.redis.cluster.v1.PscAutoConnection.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created through
     * service connectivity automation.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscAutoConnection psc_auto_connection = 1;</code>
     */
    public Builder setPscAutoConnection(com.google.cloud.redis.cluster.v1.PscAutoConnection value) {
      if (pscAutoConnectionBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        connection_ = value;
        onChanged();
      } else {
        pscAutoConnectionBuilder_.setMessage(value);
      }
      connectionCase_ = 1;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created through
     * service connectivity automation.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscAutoConnection psc_auto_connection = 1;</code>
     */
    public Builder setPscAutoConnection(
        com.google.cloud.redis.cluster.v1.PscAutoConnection.Builder builderForValue) {
      if (pscAutoConnectionBuilder_ == null) {
        connection_ = builderForValue.build();
        onChanged();
      } else {
        pscAutoConnectionBuilder_.setMessage(builderForValue.build());
      }
      connectionCase_ = 1;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created through
     * service connectivity automation.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscAutoConnection psc_auto_connection = 1;</code>
     */
    public Builder mergePscAutoConnection(
        com.google.cloud.redis.cluster.v1.PscAutoConnection value) {
      if (pscAutoConnectionBuilder_ == null) {
        // If this case is already set to a non-default value, field-merge into
        // it; otherwise the incoming value replaces whatever case was set.
        if (connectionCase_ == 1
            && connection_
                != com.google.cloud.redis.cluster.v1.PscAutoConnection.getDefaultInstance()) {
          connection_ =
              com.google.cloud.redis.cluster.v1.PscAutoConnection.newBuilder(
                      (com.google.cloud.redis.cluster.v1.PscAutoConnection) connection_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          connection_ = value;
        }
        onChanged();
      } else {
        if (connectionCase_ == 1) {
          pscAutoConnectionBuilder_.mergeFrom(value);
        } else {
          pscAutoConnectionBuilder_.setMessage(value);
        }
      }
      connectionCase_ = 1;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created through
     * service connectivity automation.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscAutoConnection psc_auto_connection = 1;</code>
     */
    public Builder clearPscAutoConnection() {
      // Only clears the oneof if this member is the one currently set.
      if (pscAutoConnectionBuilder_ == null) {
        if (connectionCase_ == 1) {
          connectionCase_ = 0;
          connection_ = null;
          onChanged();
        }
      } else {
        if (connectionCase_ == 1) {
          connectionCase_ = 0;
          connection_ = null;
        }
        pscAutoConnectionBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created through
     * service connectivity automation.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscAutoConnection psc_auto_connection = 1;</code>
     */
    public com.google.cloud.redis.cluster.v1.PscAutoConnection.Builder
        getPscAutoConnectionBuilder() {
      return getPscAutoConnectionFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created through
     * service connectivity automation.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscAutoConnection psc_auto_connection = 1;</code>
     */
    @java.lang.Override
    public com.google.cloud.redis.cluster.v1.PscAutoConnectionOrBuilder
        getPscAutoConnectionOrBuilder() {
      if ((connectionCase_ == 1) && (pscAutoConnectionBuilder_ != null)) {
        return pscAutoConnectionBuilder_.getMessageOrBuilder();
      } else {
        if (connectionCase_ == 1) {
          return (com.google.cloud.redis.cluster.v1.PscAutoConnection) connection_;
        }
        return com.google.cloud.redis.cluster.v1.PscAutoConnection.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created through
     * service connectivity automation.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscAutoConnection psc_auto_connection = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.redis.cluster.v1.PscAutoConnection,
            com.google.cloud.redis.cluster.v1.PscAutoConnection.Builder,
            com.google.cloud.redis.cluster.v1.PscAutoConnectionOrBuilder>
        getPscAutoConnectionFieldBuilder() {
      // Lazily creates the nested builder, seeding it with the currently
      // stored value (or the default instance) and transferring ownership of
      // the oneof value to the builder. Forces the case to 1.
      if (pscAutoConnectionBuilder_ == null) {
        if (!(connectionCase_ == 1)) {
          connection_ = com.google.cloud.redis.cluster.v1.PscAutoConnection.getDefaultInstance();
        }
        pscAutoConnectionBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.redis.cluster.v1.PscAutoConnection,
                com.google.cloud.redis.cluster.v1.PscAutoConnection.Builder,
                com.google.cloud.redis.cluster.v1.PscAutoConnectionOrBuilder>(
                (com.google.cloud.redis.cluster.v1.PscAutoConnection) connection_,
                getParentForChildren(),
                isClean());
        connection_ = null;
      }
      connectionCase_ = 1;
      onChanged();
      return pscAutoConnectionBuilder_;
    }
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.redis.cluster.v1.PscConnection,
            com.google.cloud.redis.cluster.v1.PscConnection.Builder,
            com.google.cloud.redis.cluster.v1.PscConnectionOrBuilder>
        pscConnectionBuilder_;
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created by the customer
     * who owns the cluster.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscConnection psc_connection = 2;</code>
     *
     * @return Whether the pscConnection field is set.
     */
    @java.lang.Override
    public boolean hasPscConnection() {
      return connectionCase_ == 2;
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created by the customer
     * who owns the cluster.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscConnection psc_connection = 2;</code>
     *
     * @return The pscConnection.
     */
    @java.lang.Override
    public com.google.cloud.redis.cluster.v1.PscConnection getPscConnection() {
      // Same builder-takes-precedence pattern as getPscAutoConnection().
      if (pscConnectionBuilder_ == null) {
        if (connectionCase_ == 2) {
          return (com.google.cloud.redis.cluster.v1.PscConnection) connection_;
        }
        return com.google.cloud.redis.cluster.v1.PscConnection.getDefaultInstance();
      } else {
        if (connectionCase_ == 2) {
          return pscConnectionBuilder_.getMessage();
        }
        return com.google.cloud.redis.cluster.v1.PscConnection.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created by the customer
     * who owns the cluster.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscConnection psc_connection = 2;</code>
     */
    public Builder setPscConnection(com.google.cloud.redis.cluster.v1.PscConnection value) {
      if (pscConnectionBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        connection_ = value;
        onChanged();
      } else {
        pscConnectionBuilder_.setMessage(value);
      }
      connectionCase_ = 2;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created by the customer
     * who owns the cluster.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscConnection psc_connection = 2;</code>
     */
    public Builder setPscConnection(
        com.google.cloud.redis.cluster.v1.PscConnection.Builder builderForValue) {
      if (pscConnectionBuilder_ == null) {
        connection_ = builderForValue.build();
        onChanged();
      } else {
        pscConnectionBuilder_.setMessage(builderForValue.build());
      }
      connectionCase_ = 2;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created by the customer
     * who owns the cluster.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscConnection psc_connection = 2;</code>
     */
    public Builder mergePscConnection(com.google.cloud.redis.cluster.v1.PscConnection value) {
      if (pscConnectionBuilder_ == null) {
        // Field-merge when this case already holds a non-default value;
        // otherwise replace.
        if (connectionCase_ == 2
            && connection_
                != com.google.cloud.redis.cluster.v1.PscConnection.getDefaultInstance()) {
          connection_ =
              com.google.cloud.redis.cluster.v1.PscConnection.newBuilder(
                      (com.google.cloud.redis.cluster.v1.PscConnection) connection_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          connection_ = value;
        }
        onChanged();
      } else {
        if (connectionCase_ == 2) {
          pscConnectionBuilder_.mergeFrom(value);
        } else {
          pscConnectionBuilder_.setMessage(value);
        }
      }
      connectionCase_ = 2;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created by the customer
     * who owns the cluster.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscConnection psc_connection = 2;</code>
     */
    public Builder clearPscConnection() {
      if (pscConnectionBuilder_ == null) {
        if (connectionCase_ == 2) {
          connectionCase_ = 0;
          connection_ = null;
          onChanged();
        }
      } else {
        if (connectionCase_ == 2) {
          connectionCase_ = 0;
          connection_ = null;
        }
        pscConnectionBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created by the customer
     * who owns the cluster.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscConnection psc_connection = 2;</code>
     */
    public com.google.cloud.redis.cluster.v1.PscConnection.Builder getPscConnectionBuilder() {
      return getPscConnectionFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created by the customer
     * who owns the cluster.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscConnection psc_connection = 2;</code>
     */
    @java.lang.Override
    public com.google.cloud.redis.cluster.v1.PscConnectionOrBuilder getPscConnectionOrBuilder() {
      if ((connectionCase_ == 2) && (pscConnectionBuilder_ != null)) {
        return pscConnectionBuilder_.getMessageOrBuilder();
      } else {
        if (connectionCase_ == 2) {
          return (com.google.cloud.redis.cluster.v1.PscConnection) connection_;
        }
        return com.google.cloud.redis.cluster.v1.PscConnection.getDefaultInstance();
      }
    }
    /**
     *
     *
     * <pre>
     * Detailed information of a PSC connection that is created by the customer
     * who owns the cluster.
     * </pre>
     *
     * <code>.google.cloud.redis.cluster.v1.PscConnection psc_connection = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.redis.cluster.v1.PscConnection,
            com.google.cloud.redis.cluster.v1.PscConnection.Builder,
            com.google.cloud.redis.cluster.v1.PscConnectionOrBuilder>
        getPscConnectionFieldBuilder() {
      // Lazy init, seeding from the current value; forces the case to 2.
      if (pscConnectionBuilder_ == null) {
        if (!(connectionCase_ == 2)) {
          connection_ = com.google.cloud.redis.cluster.v1.PscConnection.getDefaultInstance();
        }
        pscConnectionBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.redis.cluster.v1.PscConnection,
                com.google.cloud.redis.cluster.v1.PscConnection.Builder,
                com.google.cloud.redis.cluster.v1.PscConnectionOrBuilder>(
                (com.google.cloud.redis.cluster.v1.PscConnection) connection_,
                getParentForChildren(),
                isClean());
        connection_ = null;
      }
      connectionCase_ = 2;
      onChanged();
      return pscConnectionBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.redis.cluster.v1.ConnectionDetail)
  }
// @@protoc_insertion_point(class_scope:google.cloud.redis.cluster.v1.ConnectionDetail)
private static final com.google.cloud.redis.cluster.v1.ConnectionDetail DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.redis.cluster.v1.ConnectionDetail();
}
public static com.google.cloud.redis.cluster.v1.ConnectionDetail getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ConnectionDetail> PARSER =
new com.google.protobuf.AbstractParser<ConnectionDetail>() {
@java.lang.Override
public ConnectionDetail parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ConnectionDetail> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ConnectionDetail> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.redis.cluster.v1.ConnectionDetail getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/mina-sshd | 38,369 | sshd-common/src/main/java/org/apache/sshd/client/config/hosts/HostConfigEntry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.client.config.hosts;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StreamCorruptedException;
import java.io.Writer;
import java.net.InetAddress;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableSet;
import java.util.Objects;
import java.util.TreeMap;
import org.apache.sshd.common.SshConstants;
import org.apache.sshd.common.auth.MutableUserHolder;
import org.apache.sshd.common.config.ConfigFileReaderSupport;
import org.apache.sshd.common.config.keys.PublicKeyEntry;
import org.apache.sshd.common.util.GenericUtils;
import org.apache.sshd.common.util.MapEntryUtils;
import org.apache.sshd.common.util.OsUtils;
import org.apache.sshd.common.util.ValidateUtils;
import org.apache.sshd.common.util.io.IoUtils;
import org.apache.sshd.common.util.io.PathUtils;
import org.apache.sshd.common.util.io.input.NoCloseInputStream;
import org.apache.sshd.common.util.io.input.NoCloseReader;
import org.apache.sshd.common.util.io.output.NoCloseOutputStream;
/**
* Represents an entry in the client's configuration file as defined by the
* <A HREF="https://linux.die.net/man/5/ssh_config">ssh_config</A> configuration file format
*
* @author <a href="mailto:dev@mina.apache.org">Apache MINA SSHD Project</a>
* @see <A HREF="https://www.cyberciti.biz/faq/create-ssh-config-file-on-linux-unix/">OpenSSH Config File
* Examples</A>
*/
public class HostConfigEntry extends HostPatternsHolder implements MutableUserHolder {
/**
* Standard OpenSSH config file name
*/
public static final String STD_CONFIG_FILENAME = "config";
public static final String HOST_CONFIG_PROP = "Host";
public static final String MATCH_CONFIG_PROP = "Match"; // currently not handled
public static final String HOST_NAME_CONFIG_PROP = "HostName";
public static final String PORT_CONFIG_PROP = ConfigFileReaderSupport.PORT_CONFIG_PROP;
public static final String USER_CONFIG_PROP = "User";
public static final String PROXY_JUMP_CONFIG_PROP = "ProxyJump";
public static final String IDENTITY_FILE_CONFIG_PROP = "IdentityFile";
public static final String CERTIFICATE_FILE_CONFIG_PROP = "CertificateFile"; // currently not handled
/**
* Use only the identities specified in the host entry (if any)
*/
public static final String EXCLUSIVE_IDENTITIES_CONFIG_PROP = "IdentitiesOnly";
public static final boolean DEFAULT_EXCLUSIVE_IDENTITIES = false;
/**
* The IdentityAgent configuration. If not set in the {@link HostConfigEntry}, the value of this
* {@link #getProperty(String) property} is {@code null}, which means that a default SSH agent is to be used, if it
* is running. Other values defined by OpenSSH are:
* <ul>
* <dl>
* <dt>none</dt>
* <dd>No SHH agent is to be used at all, even if one is running.</dd>
* <dt>SSH_AUTH_SOCK</dt>
* <dd>The SSH agent listening on the Unix domain socket given by the environment variable {@code SSH_AUTH_SOCK}
* shall be used. If the environment variable is not set, no SSH agent is used.</dd>
* <dt><em>other</em></dt>
* <dd>For OpenSSH, the value shall resolve to the file name of a Unix domain socket to use to connect to an SSH
* agent.</dd>
* </dl>
*/
public static final String IDENTITY_AGENT = "IdentityAgent";
/**
* A case <U>insensitive</U> {@link NavigableSet} of the properties that receive special handling
*/
public static final NavigableSet<String> EXPLICIT_PROPERTIES = Collections.unmodifiableNavigableSet(
GenericUtils.asSortedSet(String.CASE_INSENSITIVE_ORDER,
HOST_CONFIG_PROP, HOST_NAME_CONFIG_PROP, PORT_CONFIG_PROP,
USER_CONFIG_PROP, IDENTITY_FILE_CONFIG_PROP, EXCLUSIVE_IDENTITIES_CONFIG_PROP));
public static final String MULTI_VALUE_SEPARATORS = " ,";
public static final char PATH_MACRO_CHAR = '%';
public static final char LOCAL_HOME_MACRO = 'd';
public static final char LOCAL_USER_MACRO = 'u';
public static final char LOCAL_HOST_MACRO = 'l';
public static final char REMOTE_HOST_MACRO = 'h';
public static final char REMOTE_USER_MACRO = 'r';
// Extra - not part of the standard
public static final char REMOTE_PORT_MACRO = 'p';
private static final class LazyDefaultConfigFileHolder {
private static final Path CONFIG_FILE = PublicKeyEntry.getDefaultKeysFolderPath().resolve(STD_CONFIG_FILENAME);
private LazyDefaultConfigFileHolder() {
throw new UnsupportedOperationException("No instance allowed");
}
}
// TODO: A better approach would be to only store "host" and the properties map. Accessors can read/write the properties map.
// TODO: Map property key to generic object. Any code that calls getProperties() would need to be updated.
protected String host;
protected String hostName;
protected int port;
protected String username;
protected String proxyJump;
protected Boolean exclusiveIdentites;
// TODO: OpenSSH ignores duplicates. Ignoring them here (via a set) would complicate keeping the map entry in sync.
protected final Collection<String> identities = new ArrayList<>();
protected final Map<String, String> properties = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
public HostConfigEntry() {
super();
}
public HostConfigEntry(String pattern, String host, int port, String username) {
this(pattern, host, port, username, null);
}
public HostConfigEntry(String pattern, String host, int port, String username, String proxyJump) {
setHost(pattern);
setHostName(host);
setPort(port);
setUsername(username);
setProxyJump(proxyJump);
}
/**
* Merges that into this via underride. That is, any value present in this entry takes precedence over the given
* entry. Only this object is modified. The given entry remains unchanged.
*
* @param that The HostConfigEntry to merge.
*/
public void collate(HostConfigEntry that) {
if (hostName == null || hostName.isEmpty()) {
hostName = that.hostName; // It doesn't matter whether that host is defined or not, since ours is not.
}
if (port <= 0) {
port = that.port;
}
if (username == null || username.isEmpty()) {
username = that.username;
}
if (proxyJump == null || proxyJump.isEmpty()) {
proxyJump = that.proxyJump;
}
if (exclusiveIdentites == null) {
exclusiveIdentites = that.exclusiveIdentites;
}
identities.addAll(that.identities);
for (Entry<String, String> e : that.properties.entrySet()) {
String key = e.getKey();
String value = e.getValue();
if (properties.containsKey(key)) {
if (key.equalsIgnoreCase(IDENTITY_FILE_CONFIG_PROP) || key.equalsIgnoreCase(CERTIFICATE_FILE_CONFIG_PROP)) {
properties.put(key, properties.get(key) + "," + value);
}
// else ignore, since our value takes precedence over that
} else { // key is not present in our properties
properties.put(key, value);
}
}
}
/**
* @return The <U>pattern(s)</U> represented by this entry
*/
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
setPatterns(parsePatterns(parseConfigValue(host)));
}
public void setHost(Collection<String> patterns) {
this.host = GenericUtils.join(ValidateUtils.checkNotNullAndNotEmpty(patterns, "No patterns"), ',');
setPatterns(parsePatterns(patterns));
}
/**
* @return The effective host name to connect to if the pattern matches
*/
public String getHostName() {
return hostName;
}
public void setHostName(String hostName) {
this.hostName = hostName;
setProperty(HOST_NAME_CONFIG_PROP, hostName);
}
/**
* @return A port override - if positive
*/
public int getPort() {
return port;
}
public void setPort(int port) {
this.port = port;
if (port <= 0) {
properties.remove(PORT_CONFIG_PROP);
} else {
setProperty(PORT_CONFIG_PROP, String.valueOf(port));
}
}
/**
* @return A username override - if not {@code null}/empty
*/
@Override
public String getUsername() {
return username;
}
@Override
public void setUsername(String username) {
this.username = username;
setProperty(USER_CONFIG_PROP, username);
}
/**
* @return the host to use as a proxy
*/
public String getProxyJump() {
return proxyJump;
}
public void setProxyJump(String proxyJump) {
this.proxyJump = proxyJump;
setProperty(PROXY_JUMP_CONFIG_PROP, proxyJump);
}
/**
* @return The current identities file paths - may be {@code null}/empty
*/
public Collection<String> getIdentities() {
return identities;
}
/**
* @param path A {@link Path} to a file that contains an identity key - never {@code null}
*/
public void addIdentity(Path path) {
addIdentity(Objects.requireNonNull(path, "No path").toAbsolutePath().normalize().toString());
}
/**
* Adds a path to an identity file
*
* @param id The identity path to add - never {@code null}
*/
public void addIdentity(String id) {
String path = ValidateUtils.hasContent(id, "No identity provided");
identities.add(path);
appendPropertyValue(IDENTITY_FILE_CONFIG_PROP, id);
}
public void setIdentities(Collection<String> identities) {
this.identities.clear();
properties.remove(IDENTITY_FILE_CONFIG_PROP);
if (identities != null) {
identities.forEach(this::addIdentity);
}
}
/**
* @return {@code true} if must use only the identities in this entry
*/
public boolean isIdentitiesOnly() {
return (exclusiveIdentites == null) ? DEFAULT_EXCLUSIVE_IDENTITIES : exclusiveIdentites;
}
public void setIdentitiesOnly(boolean identitiesOnly) {
exclusiveIdentites = identitiesOnly;
setProperty(EXCLUSIVE_IDENTITIES_CONFIG_PROP, Boolean.toString(identitiesOnly));
}
/**
* @return A {@link Map} of extra properties that have been read - may be {@code null}/empty, or even contain some
* values that have been parsed and set as members of the entry (e.g., host, port, etc.). <B>Note:</B>
* multi-valued keys use a comma-separated list of values
*/
public Map<String, String> getProperties() {
return properties;
}
/**
* @param name Property name - never {@code null}/empty
* @return Property value or {@code null} if no such property
* @see #getProperty(String, String)
*/
public String getProperty(String name) {
return getProperty(name, null);
}
/**
* @param name Property name - never {@code null}/empty
* @param defaultValue Default value to return if no such property
* @return The property value or the default one if no such property
*/
public String getProperty(String name, String defaultValue) {
String key = ValidateUtils.checkNotNullAndNotEmpty(name, "No property name");
Map<String, String> props = getProperties();
if (MapEntryUtils.isEmpty(props)) {
return defaultValue;
}
String value = props.get(key);
if (GenericUtils.isEmpty(value)) {
return defaultValue;
} else {
return value;
}
}
/**
* @param name Property name - never {@code null}/empty
* @param valsList The available values for the property
* @see #HOST_NAME_CONFIG_PROP
* @see #PORT_CONFIG_PROP
* @see #USER_CONFIG_PROP
* @see #IDENTITY_FILE_CONFIG_PROP
*/
public void processProperty(String name, Collection<String> valsList) {
String key = ValidateUtils.checkNotNullAndNotEmpty(name, "No property name");
String joinedValue = GenericUtils.join(valsList, ',');
if (HOST_NAME_CONFIG_PROP.equalsIgnoreCase(key)) {
ValidateUtils.checkTrue(GenericUtils.size(valsList) == 1, "Multiple target hosts N/A: %s", joinedValue);
setHostName(joinedValue);
} else if (PORT_CONFIG_PROP.equalsIgnoreCase(key)) {
ValidateUtils.checkTrue(GenericUtils.size(valsList) == 1, "Multiple target ports N/A: %s", joinedValue);
int newValue = Integer.parseInt(joinedValue);
ValidateUtils.checkTrue(newValue > 0, "Bad new port value: %d", newValue);
setPort(newValue);
} else if (USER_CONFIG_PROP.equalsIgnoreCase(key)) {
ValidateUtils.checkTrue(GenericUtils.size(valsList) == 1, "Multiple target users N/A: %s", joinedValue);
setUsername(joinedValue);
} else if (IDENTITY_FILE_CONFIG_PROP.equalsIgnoreCase(key)) {
ValidateUtils.checkTrue(GenericUtils.size(valsList) > 0, "No identity files specified");
for (String id : valsList) {
addIdentity(id);
}
} else if (EXCLUSIVE_IDENTITIES_CONFIG_PROP.equalsIgnoreCase(key)) {
setIdentitiesOnly(
ConfigFileReaderSupport.parseBooleanValue(
ValidateUtils.checkNotNullAndNotEmpty(joinedValue, "No identities option value")));
} else if (PROXY_JUMP_CONFIG_PROP.equalsIgnoreCase(key)) {
setProxyJump(joinedValue);
} else if (CERTIFICATE_FILE_CONFIG_PROP.equalsIgnoreCase(key)) {
appendPropertyValue(key, joinedValue);
} else {
properties.put(key, joinedValue); // Default is to overwrite any previous value. Only identities
}
}
/**
* Appends a value using a <U>comma</U> to an existing one. If no previous value then same as calling
* {@link #setProperty(String, String)}.
*
* @param name Property name - never {@code null}/empty
* @param value The value to be appended - ignored if {@code null}/empty
* @return The value <U>before</U> appending - {@code null} if no previous value
*/
public String appendPropertyValue(String name, String value) {
String key = ValidateUtils.checkNotNullAndNotEmpty(name, "No property name");
String curVal = getProperty(key);
if (GenericUtils.isEmpty(value)) {
return curVal;
}
if (GenericUtils.isEmpty(curVal)) {
return setProperty(key, value);
}
return setProperty(key, curVal + ',' + value);
}
/**
* Sets / Replaces the property value
*
* @param name Property name - never {@code null}/empty
* @param value Property value - if {@code null}/empty then {@link #removeProperty(String)} is called
* @return The previous property value - {@code null} if no such name
*/
public String setProperty(String name, String value) {
if (GenericUtils.isEmpty(value)) {
return removeProperty(name);
}
String key = ValidateUtils.checkNotNullAndNotEmpty(name, "No property name");
return properties.put(key, value);
}
/**
* @param name Property name - never {@code null}/empty
* @return The removed property value - {@code null} if no such property name
*/
public String removeProperty(String name) {
String key = ValidateUtils.checkNotNullAndNotEmpty(name, "No property name");
Map<String, String> props = getProperties();
if (MapEntryUtils.isEmpty(props)) {
return null;
} else {
return props.remove(key);
}
}
/**
* @param properties The properties to set - if {@code null} then an empty map is effectively set. <B>Note:</B> it
* is highly recommended to use a <U>case insensitive</U> key mapper.
*/
public void setProperties(Map<String, String> properties) {
this.properties.clear();
if (properties != null) {
this.properties.putAll(properties);
}
}
public <A extends Appendable> A append(A sb) throws IOException {
sb.append(HOST_CONFIG_PROP).append(' ').append(ValidateUtils.checkNotNullAndNotEmpty(getHost(), "No host pattern"))
.append(IoUtils.EOL);
appendNonEmptyProperty(sb, HOST_NAME_CONFIG_PROP, getHostName());
appendNonEmptyPort(sb, PORT_CONFIG_PROP, getPort());
appendNonEmptyProperty(sb, USER_CONFIG_PROP, getUsername());
appendNonEmptyValues(sb, IDENTITY_FILE_CONFIG_PROP, getIdentities());
if (exclusiveIdentites != null) {
appendNonEmptyProperty(sb, EXCLUSIVE_IDENTITIES_CONFIG_PROP,
ConfigFileReaderSupport.yesNoValueOf(exclusiveIdentites));
}
appendNonEmptyProperties(sb, getProperties());
return sb;
}
@Override
public String toString() {
return getHost() + ": " + getUsername() + "@" + getHostName() + ":" + getPort();
}
/**
* @param <A> The {@link Appendable} type
* @param sb The target appender
* @param name The property name - never {@code null}/empty
* @param port The port value - ignored if non-positive
* @return The target appender after having appended (or not) the value
* @throws IOException If failed to append the requested data
* @see #appendNonEmptyProperty(Appendable, String, Object)
*/
public static <A extends Appendable> A appendNonEmptyPort(A sb, String name, int port) throws IOException {
return appendNonEmptyProperty(sb, name, (port > 0) ? Integer.toString(port) : null);
}
/**
* Appends the extra properties - while skipping the {@link #EXPLICIT_PROPERTIES} ones
*
* @param <A> The {@link Appendable} type
* @param sb The target appender
* @param props The {@link Map} of properties - ignored if {@code null}/empty
* @return The target appender after having appended (or not) the value
* @throws IOException If failed to append the requested data
* @see #appendNonEmptyProperty(Appendable, String, Object)
*/
public static <A extends Appendable> A appendNonEmptyProperties(A sb, Map<String, ?> props) throws IOException {
if (MapEntryUtils.isEmpty(props)) {
return sb;
}
// Cannot use forEach because of the IOException being thrown by appendNonEmptyProperty
for (Map.Entry<String, ?> pe : props.entrySet()) {
String name = pe.getKey();
if (EXPLICIT_PROPERTIES.contains(name)) {
continue;
}
appendNonEmptyProperty(sb, name, pe.getValue());
}
return sb;
}
/**
* @param <A> The {@link Appendable} type
* @param sb The target appender
* @param name The property name - never {@code null}/empty
* @param value The property value - ignored if {@code null}. <B>Note:</B> if the string representation of
* the value contains any commas, they are assumed to indicate a multi-valued property which is
* broken down to <U>individual</U> lines - one per value.
* @return The target appender after having appended (or not) the value
* @throws IOException If failed to append the requested data
* @see #appendNonEmptyValues(Appendable, String, Object...)
*/
public static <A extends Appendable> A appendNonEmptyProperty(A sb, String name, Object value) throws IOException {
String s = Objects.toString(value, null);
String[] vals = GenericUtils.split(s, ',');
return appendNonEmptyValues(sb, name, (Object[]) vals);
}
/**
* @param <A> The {@link Appendable} type
* @param sb The target appender
* @param name The property name - never {@code null}/empty
* @param values The values to be added - one per line - ignored if {@code null}/empty
* @return The target appender after having appended (or not) the value
* @throws IOException If failed to append the requested data
* @see #appendNonEmptyValues(Appendable, String, Collection)
*/
public static <A extends Appendable> A appendNonEmptyValues(A sb, String name, Object... values) throws IOException {
return appendNonEmptyValues(sb, name, GenericUtils.isEmpty(values) ? Collections.emptyList() : Arrays.asList(values));
}
/**
* @param <A> The {@link Appendable} type
* @param sb The target appender
* @param name The property name - never {@code null}/empty
* @param values The values to be added - one per line - ignored if {@code null}/empty
* @return The target appender after having appended (or not) the value
* @throws IOException If failed to append the requested data
*/
public static <A extends Appendable> A appendNonEmptyValues(A sb, String name, Collection<?> values) throws IOException {
String k = ValidateUtils.checkNotNullAndNotEmpty(name, "No property name");
if (GenericUtils.isEmpty(values)) {
return sb;
}
for (Object v : values) {
sb.append(" ").append(k).append(' ').append(Objects.toString(v)).append(IoUtils.EOL);
}
return sb;
}
/**
* Locates all the matching entries for a give host name / address
*
* @param host The host name / address - ignored if {@code null}/empty
* @param entries The {@link HostConfigEntry}-ies to scan - ignored if {@code null}/empty
* @return A {@link List} of all the matching entries
* @see #isHostMatch(String, int)
*/
public static List<HostConfigEntry> findMatchingEntries(String host, HostConfigEntry... entries) {
if (GenericUtils.isEmpty(host) || GenericUtils.isEmpty(entries)) {
return Collections.emptyList();
} else {
return findMatchingEntries(host, Arrays.asList(entries));
}
}
/**
* Locates all the matching entries for a give host name / address
*
* @param host The host name / address - ignored if {@code null}/empty
* @param entries The {@link HostConfigEntry}-ies to scan - ignored if {@code null}/empty
* @return A {@link List} of all the matching entries
* @see #isHostMatch(String, int)
*/
public static List<HostConfigEntry> findMatchingEntries(String host, Collection<? extends HostConfigEntry> entries) {
if (GenericUtils.isEmpty(host) || GenericUtils.isEmpty(entries)) {
return Collections.emptyList();
}
List<HostConfigEntry> matches = null;
for (HostConfigEntry entry : entries) {
if (!entry.isHostMatch(host, 0 /* any port */)) {
continue; // debug breakpoint
}
if (matches == null) {
matches = new ArrayList<>(entries.size()); // in case ALL of them match
}
matches.add(entry);
}
if (matches == null) {
return Collections.emptyList();
} else {
return matches;
}
}
/**
* @param entries The entries - ignored if {@code null}/empty
* @return A {@link HostConfigEntryResolver} wrapper using the entries
*/
public static HostConfigEntryResolver toHostConfigEntryResolver(Collection<? extends HostConfigEntry> entries) {
if (GenericUtils.isEmpty(entries)) {
return HostConfigEntryResolver.EMPTY;
} else {
return (host, port, lclAddress, username, proxyJump, ctx) -> {
List<HostConfigEntry> matches = findMatchingEntries(host, entries);
int numMatches = GenericUtils.size(matches);
if (numMatches <= 0) {
return null;
}
// Collate attributes from all matching entries.
HostConfigEntry entry = new HostConfigEntry(host, null, port, username);
for (HostConfigEntry m : matches) {
entry.collate(m);
}
// Apply standard defaults.
String temp = entry.getHostName(); // Remember that this was null above.
if (temp == null || temp.isEmpty()) {
entry.setHostName(host);
}
temp = entry.getUsername();
if (temp == null || temp.isEmpty()) {
entry.setUsername(OsUtils.getCurrentUser());
}
if (entry.getPort() < 1) {
entry.setPort(SshConstants.DEFAULT_PORT);
}
// Resolve file names
Collection<String> identities = entry.getIdentities();
if (!GenericUtils.isEmpty(identities)) {
identities = new ArrayList<>(identities);
entry.setIdentities(Collections.emptyList());
for (String id : identities) {
entry.addIdentity(
resolveIdentityFilePath(id, entry.getHostName(), entry.getPort(), entry.getUsername()));
}
}
// Same for CertificateFile
String certificateFiles = entry.getProperty(CERTIFICATE_FILE_CONFIG_PROP);
if (!GenericUtils.isEmpty(certificateFiles)) {
entry.removeProperty(CERTIFICATE_FILE_CONFIG_PROP);
String[] split = certificateFiles.split(",");
List<String> resolved = new ArrayList<>(split.length);
for (String raw : split) {
resolved.add(resolveIdentityFilePath(raw, entry.getHostName(), entry.getPort(), entry.getUsername()));
}
entry.processProperty(CERTIFICATE_FILE_CONFIG_PROP, resolved);
}
return entry;
};
}
}
public static List<HostConfigEntry> readHostConfigEntries(Path path, OpenOption... options) throws IOException {
try (InputStream input = Files.newInputStream(path, options)) {
return readHostConfigEntries(input, true);
}
}
public static List<HostConfigEntry> readHostConfigEntries(URL url) throws IOException {
try (InputStream input = url.openStream()) {
return readHostConfigEntries(input, true);
}
}
public static List<HostConfigEntry> readHostConfigEntries(InputStream inStream, boolean okToClose) throws IOException {
try (Reader reader
= new InputStreamReader(NoCloseInputStream.resolveInputStream(inStream, okToClose), StandardCharsets.UTF_8)) {
return readHostConfigEntries(reader, true);
}
}
public static List<HostConfigEntry> readHostConfigEntries(Reader rdr, boolean okToClose) throws IOException {
try (BufferedReader buf = new BufferedReader(NoCloseReader.resolveReader(rdr, okToClose))) {
return readHostConfigEntries(buf);
}
}
/**
* Reads configuration entries
*
* @param rdr The {@link BufferedReader} to use
* @return The {@link List} of read {@link HostConfigEntry}-ies
* @throws IOException If failed to parse the read configuration
*/
public static List<HostConfigEntry> readHostConfigEntries(BufferedReader rdr) throws IOException {
HostConfigEntry curEntry = null;
List<HostConfigEntry> entries = new ArrayList<>();
int lineNumber = 1;
for (String line = rdr.readLine(); line != null; line = rdr.readLine(), lineNumber++) {
line = GenericUtils.replaceWhitespaceAndTrim(line);
if (GenericUtils.isEmpty(line)) {
continue;
}
// Strip off comments
int pos = line.indexOf(ConfigFileReaderSupport.COMMENT_CHAR);
if (pos == 0) {
continue;
}
if (pos > 0) {
line = line.substring(0, pos);
line = line.trim();
}
/*
* Some options use '=' as delimiter, others use ' '
* TODO: This version treats '=' as taking precedence, but that means '=' can't show up
* in a file name. A better approach is to break the line into tokens, possibly quoted,
* then detect '='.
*/
String key;
String value;
List<String> valsList;
pos = line.indexOf('=');
if (pos > 0) {
key = line.substring(0, pos).trim();
value = line.substring(pos + 1);
valsList = new ArrayList<>(1);
valsList.add(value);
} else {
pos = line.indexOf(' ');
if (pos < 0) {
throw new StreamCorruptedException("No configuration value delimiter at line " + lineNumber + ": " + line);
}
key = line.substring(0, pos);
value = line.substring(pos + 1);
valsList = GenericUtils.filterToNotBlank(parseConfigValue(value));
}
// Detect transition to new entry.
if (HOST_CONFIG_PROP.equalsIgnoreCase(key)) {
if (GenericUtils.isEmpty(valsList)) {
throw new StreamCorruptedException("Missing host pattern(s) at line " + lineNumber + ": " + line);
}
if (curEntry != null) {
entries.add(curEntry);
}
curEntry = new HostConfigEntry();
curEntry.setHost(valsList);
} else if (MATCH_CONFIG_PROP.equalsIgnoreCase(key)) {
throw new StreamCorruptedException("Currently not able to process Match sections");
} else if (curEntry == null) {
// Properties that occur before the first Host or Match keyword are a kind of global entry.
curEntry = new HostConfigEntry();
curEntry.setHost(Collections.singletonList(ALL_HOSTS_PATTERN));
}
String joinedValue = GenericUtils.join(valsList, ',');
curEntry.appendPropertyValue(key, joinedValue);
curEntry.processProperty(key, valsList);
}
if (curEntry != null) {
entries.add(curEntry);
}
return entries;
}
public static void writeHostConfigEntries(
Path path, Collection<? extends HostConfigEntry> entries, OpenOption... options)
throws IOException {
try (OutputStream outputStream = Files.newOutputStream(path, options)) {
writeHostConfigEntries(outputStream, true, entries);
}
}
public static void writeHostConfigEntries(
OutputStream outputStream, boolean okToClose, Collection<? extends HostConfigEntry> entries)
throws IOException {
if (GenericUtils.isEmpty(entries)) {
return;
}
try (Writer w = new OutputStreamWriter(
NoCloseOutputStream.resolveOutputStream(outputStream, okToClose), StandardCharsets.UTF_8)) {
appendHostConfigEntries(w, entries);
}
}
public static <A extends Appendable> A appendHostConfigEntries(A sb, Collection<? extends HostConfigEntry> entries)
throws IOException {
if (GenericUtils.isEmpty(entries)) {
return sb;
}
for (HostConfigEntry entry : entries) {
entry.append(sb);
}
return sb;
}
/**
* Checks if this is a multi-value - allow space and comma
*
* @todo Handle quote marks.
* @param value The value - ignored if {@code null}/empty (after trimming)
* @return A {@link List} of the encountered values
*/
public static List<String> parseConfigValue(String value) {
String s = GenericUtils.replaceWhitespaceAndTrim(value);
if (GenericUtils.isEmpty(s)) {
return Collections.emptyList();
}
for (int index = 0; index < MULTI_VALUE_SEPARATORS.length(); index++) {
char sep = MULTI_VALUE_SEPARATORS.charAt(index);
int pos = s.indexOf(sep);
if (pos >= 0) {
String[] vals = GenericUtils.split(s, sep);
if (GenericUtils.isEmpty(vals)) {
return Collections.emptyList();
} else {
return Arrays.asList(vals);
}
}
}
// this point is reached if no separators found
return Collections.singletonList(s);
}
    /**
     * Resolves the location of an identity file. The file name may use the tilde
     * syntax to refer to a user's home directory or one of the following escape
     * characters: '%d' (local user's home directory), '%u' (local user name),
     * '%l' (local host name), '%h' (remote host name) or '%r' (remote user name).
     * A doubled {@code PATH_MACRO_CHAR} yields the literal macro character, and
     * {@code REMOTE_PORT_MACRO} expands to the remote port value.
     *
     * @param id       The configured identity path - returned as-is if {@code null}/empty
     * @param host     The remote host - required only when the remote-host macro is used
     * @param port     The remote port - must be positive only when the remote-port macro is used
     * @param username The remote user - required only when the remote-user macro is used
     * @return The resolved path, using the local {@link File#separatorChar}
     * @throws IOException If failed to resolve the local host
     */
    public static String resolveIdentityFilePath(String id, String host, int port, String username) throws IOException {
        if (GenericUtils.isEmpty(id)) {
            return id;
        }
        String path = id.replace('/', File.separatorChar); // make sure all separators are local
        String[] elements = GenericUtils.split(path, File.separatorChar);
        StringBuilder sb = new StringBuilder(path.length() + Long.SIZE);
        // Re-assemble the path component by component, expanding macros in-place
        for (int index = 0; index < elements.length; index++) {
            String elem = elements[index];
            if (index > 0) {
                sb.append(File.separatorChar);
            }
            for (int curPos = 0; curPos < elem.length(); curPos++) {
                char ch = elem.charAt(curPos);
                if (ch == PathUtils.HOME_TILDE_CHAR) {
                    // Tilde is only legal as the very first character of the whole path
                    ValidateUtils.checkTrue((curPos == 0) && (index == 0), "Home tilde must be first: %s", id);
                    PathUtils.appendUserHome(sb);
                } else if (ch == PATH_MACRO_CHAR) {
                    // Consume the modifier character that follows the macro marker
                    curPos++;
                    ValidateUtils.checkTrue(curPos < elem.length(), "Missing macro modifier in %s", id);
                    ch = elem.charAt(curPos);
                    switch (ch) {
                        case PATH_MACRO_CHAR:
                            // Doubled marker - emit the literal character
                            sb.append(ch);
                            break;
                        case LOCAL_HOME_MACRO:
                            // Like tilde, only legal at the very start of the path
                            ValidateUtils.checkTrue((curPos == 1) && (index == 0), "Home macro must be first: %s", id);
                            PathUtils.appendUserHome(sb);
                            break;
                        case LOCAL_USER_MACRO:
                            sb.append(OsUtils.getCurrentUser());
                            break;
                        case LOCAL_HOST_MACRO: {
                            // May throw (an IOException subclass) if the local host cannot be resolved
                            InetAddress address = Objects.requireNonNull(InetAddress.getLocalHost(), "No local address");
                            sb.append(ValidateUtils.checkNotNullAndNotEmpty(address.getHostName(), "No local name"));
                            break;
                        }
                        case REMOTE_HOST_MACRO:
                            sb.append(ValidateUtils.checkNotNullAndNotEmpty(host, "No remote host provided"));
                            break;
                        case REMOTE_USER_MACRO:
                            sb.append(ValidateUtils.hasContent(username, "No remote user provided"));
                            break;
                        case REMOTE_PORT_MACRO:
                            ValidateUtils.checkTrue(port > 0, "Bad remote port value: %d", port);
                            sb.append(port);
                            break;
                        default:
                            ValidateUtils.throwIllegalArgumentException("Bad modifier '%s' in %s", String.valueOf(ch), id);
                    }
                } else {
                    // Ordinary character - copied verbatim
                    sb.append(ch);
                }
            }
        }
        return sb.toString();
    }
    /**
     * @return The default {@link Path} location of the OpenSSH hosts entries configuration file
     */
    @SuppressWarnings("synthetic-access")
    public static Path getDefaultHostConfigFile() {
        // Resolved lazily (exactly once) via the initialization-on-demand holder idiom
        return LazyDefaultConfigFileHolder.CONFIG_FILE;
    }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.aiplatform.v1beta1;
import static com.google.cloud.aiplatform.v1beta1.GenAiTuningServiceClient.ListLocationsPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.GenAiTuningServiceClient.ListTuningJobsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.Lists;
import com.google.iam.v1.AuditConfig;
import com.google.iam.v1.Binding;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.GetPolicyOptions;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.ByteString;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Timestamp;
import com.google.rpc.Status;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class GenAiTuningServiceClientTest {
  // Shared mocks backing the in-process gRPC server - one per service the client exposes.
  private static MockGenAiTuningService mockGenAiTuningService;
  private static MockIAMPolicy mockIAMPolicy;
  private static MockLocations mockLocations;
  private static MockServiceHelper mockServiceHelper;
  // Per-test state: a local channel bound to the mock server, and the client under test.
  private LocalChannelProvider channelProvider;
  private GenAiTuningServiceClient client;
  // Spins up a single in-process server (tuning, locations and IAM mocks) shared by all tests.
  @BeforeClass
  public static void startStaticServer() {
    mockGenAiTuningService = new MockGenAiTuningService();
    mockLocations = new MockLocations();
    mockIAMPolicy = new MockIAMPolicy();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(),
            Arrays.<MockGrpcService>asList(mockGenAiTuningService, mockLocations, mockIAMPolicy));
    mockServiceHelper.start();
  }
  // Tears down the shared in-process server once the whole class has run.
  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }
  // Creates a fresh client against the mock server before each test; no credentials
  // are needed because the channel is purely local.
  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    GenAiTuningServiceSettings settings =
        GenAiTuningServiceSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = GenAiTuningServiceClient.create(settings);
  }
  // Releases client resources after every test.
  @After
  public void tearDown() throws Exception {
    client.close();
  }
  // createTuningJob with a LocationName parent: the stubbed response is returned
  // unchanged, and the outgoing request carries the formatted parent and job.
  @Test
  public void createTuningJobTest() throws Exception {
    TuningJob expectedResponse =
        TuningJob.newBuilder()
            .setName(TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]").toString())
            .setTunedModelDisplayName("tunedModelDisplayName759401232")
            .setDescription("description-1724546052")
            .setCustomBaseModel("customBaseModel942572135")
            .setState(JobState.forNumber(0))
            .setCreateTime(Timestamp.newBuilder().build())
            .setStartTime(Timestamp.newBuilder().build())
            .setEndTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setError(Status.newBuilder().build())
            .putAllLabels(new HashMap<String, String>())
            .setExperiment(
                ContextName.of("[PROJECT]", "[LOCATION]", "[METADATA_STORE]", "[CONTEXT]")
                    .toString())
            .setTunedModel(TunedModel.newBuilder().build())
            .setTuningDataStats(TuningDataStats.newBuilder().build())
            .setPipelineJob(
                PipelineJobName.of("[PROJECT]", "[LOCATION]", "[PIPELINE_JOB]").toString())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setOutputUri("outputUri-2119300949")
            .addAllEvaluateDatasetRuns(new ArrayList<EvaluateDatasetRun>())
            .build();
    mockGenAiTuningService.addResponse(expectedResponse);
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    TuningJob tuningJob = TuningJob.newBuilder().build();
    TuningJob actualResponse = client.createTuningJob(parent, tuningJob);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockGenAiTuningService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateTuningJobRequest actualRequest = ((CreateTuningJobRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(tuningJob, actualRequest.getTuningJob());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // A server-side INVALID_ARGUMENT status must surface as InvalidArgumentException.
  @Test
  public void createTuningJobExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockGenAiTuningService.addException(exception);
    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      TuningJob tuningJob = TuningJob.newBuilder().build();
      client.createTuningJob(parent, tuningJob);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // createTuningJob with a plain String parent (the second generated overload).
  @Test
  public void createTuningJobTest2() throws Exception {
    TuningJob expectedResponse =
        TuningJob.newBuilder()
            .setName(TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]").toString())
            .setTunedModelDisplayName("tunedModelDisplayName759401232")
            .setDescription("description-1724546052")
            .setCustomBaseModel("customBaseModel942572135")
            .setState(JobState.forNumber(0))
            .setCreateTime(Timestamp.newBuilder().build())
            .setStartTime(Timestamp.newBuilder().build())
            .setEndTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setError(Status.newBuilder().build())
            .putAllLabels(new HashMap<String, String>())
            .setExperiment(
                ContextName.of("[PROJECT]", "[LOCATION]", "[METADATA_STORE]", "[CONTEXT]")
                    .toString())
            .setTunedModel(TunedModel.newBuilder().build())
            .setTuningDataStats(TuningDataStats.newBuilder().build())
            .setPipelineJob(
                PipelineJobName.of("[PROJECT]", "[LOCATION]", "[PIPELINE_JOB]").toString())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setOutputUri("outputUri-2119300949")
            .addAllEvaluateDatasetRuns(new ArrayList<EvaluateDatasetRun>())
            .build();
    mockGenAiTuningService.addResponse(expectedResponse);
    String parent = "parent-995424086";
    TuningJob tuningJob = TuningJob.newBuilder().build();
    TuningJob actualResponse = client.createTuningJob(parent, tuningJob);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockGenAiTuningService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateTuningJobRequest actualRequest = ((CreateTuningJobRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(tuningJob, actualRequest.getTuningJob());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error propagation for the String-parent overload.
  @Test
  public void createTuningJobExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockGenAiTuningService.addException(exception);
    try {
      String parent = "parent-995424086";
      TuningJob tuningJob = TuningJob.newBuilder().build();
      client.createTuningJob(parent, tuningJob);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // getTuningJob with a typed TuningJobName: response echoed, formatted name sent.
  @Test
  public void getTuningJobTest() throws Exception {
    TuningJob expectedResponse =
        TuningJob.newBuilder()
            .setName(TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]").toString())
            .setTunedModelDisplayName("tunedModelDisplayName759401232")
            .setDescription("description-1724546052")
            .setCustomBaseModel("customBaseModel942572135")
            .setState(JobState.forNumber(0))
            .setCreateTime(Timestamp.newBuilder().build())
            .setStartTime(Timestamp.newBuilder().build())
            .setEndTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setError(Status.newBuilder().build())
            .putAllLabels(new HashMap<String, String>())
            .setExperiment(
                ContextName.of("[PROJECT]", "[LOCATION]", "[METADATA_STORE]", "[CONTEXT]")
                    .toString())
            .setTunedModel(TunedModel.newBuilder().build())
            .setTuningDataStats(TuningDataStats.newBuilder().build())
            .setPipelineJob(
                PipelineJobName.of("[PROJECT]", "[LOCATION]", "[PIPELINE_JOB]").toString())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setOutputUri("outputUri-2119300949")
            .addAllEvaluateDatasetRuns(new ArrayList<EvaluateDatasetRun>())
            .build();
    mockGenAiTuningService.addResponse(expectedResponse);
    TuningJobName name = TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]");
    TuningJob actualResponse = client.getTuningJob(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockGenAiTuningService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetTuningJobRequest actualRequest = ((GetTuningJobRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // A server-side INVALID_ARGUMENT status must surface as InvalidArgumentException.
  @Test
  public void getTuningJobExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockGenAiTuningService.addException(exception);
    try {
      TuningJobName name = TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]");
      client.getTuningJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // getTuningJob with a plain String resource name.
  @Test
  public void getTuningJobTest2() throws Exception {
    TuningJob expectedResponse =
        TuningJob.newBuilder()
            .setName(TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]").toString())
            .setTunedModelDisplayName("tunedModelDisplayName759401232")
            .setDescription("description-1724546052")
            .setCustomBaseModel("customBaseModel942572135")
            .setState(JobState.forNumber(0))
            .setCreateTime(Timestamp.newBuilder().build())
            .setStartTime(Timestamp.newBuilder().build())
            .setEndTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setError(Status.newBuilder().build())
            .putAllLabels(new HashMap<String, String>())
            .setExperiment(
                ContextName.of("[PROJECT]", "[LOCATION]", "[METADATA_STORE]", "[CONTEXT]")
                    .toString())
            .setTunedModel(TunedModel.newBuilder().build())
            .setTuningDataStats(TuningDataStats.newBuilder().build())
            .setPipelineJob(
                PipelineJobName.of("[PROJECT]", "[LOCATION]", "[PIPELINE_JOB]").toString())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setOutputUri("outputUri-2119300949")
            .addAllEvaluateDatasetRuns(new ArrayList<EvaluateDatasetRun>())
            .build();
    mockGenAiTuningService.addResponse(expectedResponse);
    String name = "name3373707";
    TuningJob actualResponse = client.getTuningJob(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockGenAiTuningService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetTuningJobRequest actualRequest = ((GetTuningJobRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error propagation for the String-name overload.
  @Test
  public void getTuningJobExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockGenAiTuningService.addException(exception);
    try {
      String name = "name3373707";
      client.getTuningJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // listTuningJobs paging: a single-element page with an empty next-page token
  // should yield exactly one resource through the paged iterator.
  @Test
  public void listTuningJobsTest() throws Exception {
    TuningJob responsesElement = TuningJob.newBuilder().build();
    ListTuningJobsResponse expectedResponse =
        ListTuningJobsResponse.newBuilder()
            .setNextPageToken("")
            .addAllTuningJobs(Arrays.asList(responsesElement))
            .build();
    mockGenAiTuningService.addResponse(expectedResponse);
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    ListTuningJobsPagedResponse pagedListResponse = client.listTuningJobs(parent);
    List<TuningJob> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getTuningJobsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockGenAiTuningService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListTuningJobsRequest actualRequest = ((ListTuningJobsRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // A server-side INVALID_ARGUMENT status must surface as InvalidArgumentException.
  @Test
  public void listTuningJobsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockGenAiTuningService.addException(exception);
    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      client.listTuningJobs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Same paging check using the plain String parent overload.
  @Test
  public void listTuningJobsTest2() throws Exception {
    TuningJob responsesElement = TuningJob.newBuilder().build();
    ListTuningJobsResponse expectedResponse =
        ListTuningJobsResponse.newBuilder()
            .setNextPageToken("")
            .addAllTuningJobs(Arrays.asList(responsesElement))
            .build();
    mockGenAiTuningService.addResponse(expectedResponse);
    String parent = "parent-995424086";
    ListTuningJobsPagedResponse pagedListResponse = client.listTuningJobs(parent);
    List<TuningJob> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getTuningJobsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockGenAiTuningService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListTuningJobsRequest actualRequest = ((ListTuningJobsRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error propagation for the String-parent overload.
  @Test
  public void listTuningJobsExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockGenAiTuningService.addException(exception);
    try {
      String parent = "parent-995424086";
      client.listTuningJobs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // cancelTuningJob returns Empty; only the request's name field is verified.
  @Test
  public void cancelTuningJobTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockGenAiTuningService.addResponse(expectedResponse);
    TuningJobName name = TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]");
    client.cancelTuningJob(name);
    List<AbstractMessage> actualRequests = mockGenAiTuningService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CancelTuningJobRequest actualRequest = ((CancelTuningJobRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // A server-side INVALID_ARGUMENT status must surface as InvalidArgumentException.
  @Test
  public void cancelTuningJobExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockGenAiTuningService.addException(exception);
    try {
      TuningJobName name = TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]");
      client.cancelTuningJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Same cancellation check using the plain String name overload.
  @Test
  public void cancelTuningJobTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockGenAiTuningService.addResponse(expectedResponse);
    String name = "name3373707";
    client.cancelTuningJob(name);
    List<AbstractMessage> actualRequests = mockGenAiTuningService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CancelTuningJobRequest actualRequest = ((CancelTuningJobRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error propagation for the String-name overload.
  @Test
  public void cancelTuningJobExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockGenAiTuningService.addException(exception);
    try {
      String name = "name3373707";
      client.cancelTuningJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // rebaseTunedModel is a long-running operation: the mock returns an already-done
  // Operation whose response Any unpacks to the expected TuningJob.
  @Test
  public void rebaseTunedModelTest() throws Exception {
    TuningJob expectedResponse =
        TuningJob.newBuilder()
            .setName(TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]").toString())
            .setTunedModelDisplayName("tunedModelDisplayName759401232")
            .setDescription("description-1724546052")
            .setCustomBaseModel("customBaseModel942572135")
            .setState(JobState.forNumber(0))
            .setCreateTime(Timestamp.newBuilder().build())
            .setStartTime(Timestamp.newBuilder().build())
            .setEndTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setError(Status.newBuilder().build())
            .putAllLabels(new HashMap<String, String>())
            .setExperiment(
                ContextName.of("[PROJECT]", "[LOCATION]", "[METADATA_STORE]", "[CONTEXT]")
                    .toString())
            .setTunedModel(TunedModel.newBuilder().build())
            .setTuningDataStats(TuningDataStats.newBuilder().build())
            .setPipelineJob(
                PipelineJobName.of("[PROJECT]", "[LOCATION]", "[PIPELINE_JOB]").toString())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setOutputUri("outputUri-2119300949")
            .addAllEvaluateDatasetRuns(new ArrayList<EvaluateDatasetRun>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("rebaseTunedModelTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockGenAiTuningService.addResponse(resultOperation);
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    TunedModelRef tunedModelRef = TunedModelRef.newBuilder().build();
    TuningJob actualResponse = client.rebaseTunedModelAsync(parent, tunedModelRef).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockGenAiTuningService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    RebaseTunedModelRequest actualRequest = ((RebaseTunedModelRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(tunedModelRef, actualRequest.getTunedModelRef());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // For async LRO methods the gRPC error arrives wrapped in an ExecutionException;
  // the cause must be an InvalidArgumentException with the matching status code.
  @Test
  public void rebaseTunedModelExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockGenAiTuningService.addException(exception);
    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      TunedModelRef tunedModelRef = TunedModelRef.newBuilder().build();
      client.rebaseTunedModelAsync(parent, tunedModelRef).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Same LRO round-trip using the plain String parent overload.
  @Test
  public void rebaseTunedModelTest2() throws Exception {
    TuningJob expectedResponse =
        TuningJob.newBuilder()
            .setName(TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]").toString())
            .setTunedModelDisplayName("tunedModelDisplayName759401232")
            .setDescription("description-1724546052")
            .setCustomBaseModel("customBaseModel942572135")
            .setState(JobState.forNumber(0))
            .setCreateTime(Timestamp.newBuilder().build())
            .setStartTime(Timestamp.newBuilder().build())
            .setEndTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setError(Status.newBuilder().build())
            .putAllLabels(new HashMap<String, String>())
            .setExperiment(
                ContextName.of("[PROJECT]", "[LOCATION]", "[METADATA_STORE]", "[CONTEXT]")
                    .toString())
            .setTunedModel(TunedModel.newBuilder().build())
            .setTuningDataStats(TuningDataStats.newBuilder().build())
            .setPipelineJob(
                PipelineJobName.of("[PROJECT]", "[LOCATION]", "[PIPELINE_JOB]").toString())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setOutputUri("outputUri-2119300949")
            .addAllEvaluateDatasetRuns(new ArrayList<EvaluateDatasetRun>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("rebaseTunedModelTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockGenAiTuningService.addResponse(resultOperation);
    String parent = "parent-995424086";
    TunedModelRef tunedModelRef = TunedModelRef.newBuilder().build();
    TuningJob actualResponse = client.rebaseTunedModelAsync(parent, tunedModelRef).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockGenAiTuningService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    RebaseTunedModelRequest actualRequest = ((RebaseTunedModelRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(tunedModelRef, actualRequest.getTunedModelRef());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error propagation (via ExecutionException cause) for the String-parent overload.
  @Test
  public void rebaseTunedModelExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockGenAiTuningService.addException(exception);
    try {
      String parent = "parent-995424086";
      TunedModelRef tunedModelRef = TunedModelRef.newBuilder().build();
      client.rebaseTunedModelAsync(parent, tunedModelRef).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Locations mixin: listLocations pages through a single mocked Location and
  // forwards every request field unchanged.
  @Test
  public void listLocationsTest() throws Exception {
    Location responsesElement = Location.newBuilder().build();
    ListLocationsResponse expectedResponse =
        ListLocationsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLocations(Arrays.asList(responsesElement))
            .build();
    mockLocations.addResponse(expectedResponse);
    ListLocationsRequest request =
        ListLocationsRequest.newBuilder()
            .setName("name3373707")
            .setFilter("filter-1274492040")
            .setPageSize(883849137)
            .setPageToken("pageToken873572522")
            .build();
    ListLocationsPagedResponse pagedListResponse = client.listLocations(request);
    List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
    Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
    Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // A server-side INVALID_ARGUMENT status must surface as InvalidArgumentException.
  @Test
  public void listLocationsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);
    try {
      ListLocationsRequest request =
          ListLocationsRequest.newBuilder()
              .setName("name3373707")
              .setFilter("filter-1274492040")
              .setPageSize(883849137)
              .setPageToken("pageToken873572522")
              .build();
      client.listLocations(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Locations mixin: getLocation echoes the mocked Location and forwards the name.
  @Test
  public void getLocationTest() throws Exception {
    Location expectedResponse =
        Location.newBuilder()
            .setName("name3373707")
            .setLocationId("locationId1541836720")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setMetadata(Any.newBuilder().build())
            .build();
    mockLocations.addResponse(expectedResponse);
    GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
    Location actualResponse = client.getLocation(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error propagation for getLocation.
  @Test
  public void getLocationExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);
    try {
      GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
      client.getLocation(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // IAM mixin: setIamPolicy round-trip - mocked Policy echoed, resource/policy/mask forwarded.
  @Test
  public void setIamPolicyTest() throws Exception {
    Policy expectedResponse =
        Policy.newBuilder()
            .setVersion(351608024)
            .addAllBindings(new ArrayList<Binding>())
            .addAllAuditConfigs(new ArrayList<AuditConfig>())
            .setEtag(ByteString.EMPTY)
            .build();
    mockIAMPolicy.addResponse(expectedResponse);
    SetIamPolicyRequest request =
        SetIamPolicyRequest.newBuilder()
            .setResource(
                EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                    .toString())
            .setPolicy(Policy.newBuilder().build())
            .setUpdateMask(FieldMask.newBuilder().build())
            .build();
    Policy actualResponse = client.setIamPolicy(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    SetIamPolicyRequest actualRequest = ((SetIamPolicyRequest) actualRequests.get(0));
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getPolicy(), actualRequest.getPolicy());
    Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // A server-side INVALID_ARGUMENT status must surface as InvalidArgumentException.
  @Test
  public void setIamPolicyExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);
    try {
      SetIamPolicyRequest request =
          SetIamPolicyRequest.newBuilder()
              .setResource(
                  EndpointName.ofProjectLocationEndpointName(
                          "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                      .toString())
              .setPolicy(Policy.newBuilder().build())
              .setUpdateMask(FieldMask.newBuilder().build())
              .build();
      client.setIamPolicy(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // IAM mixin: getIamPolicy round-trip - mocked Policy echoed, resource/options forwarded.
  @Test
  public void getIamPolicyTest() throws Exception {
    Policy expectedResponse =
        Policy.newBuilder()
            .setVersion(351608024)
            .addAllBindings(new ArrayList<Binding>())
            .addAllAuditConfigs(new ArrayList<AuditConfig>())
            .setEtag(ByteString.EMPTY)
            .build();
    mockIAMPolicy.addResponse(expectedResponse);
    GetIamPolicyRequest request =
        GetIamPolicyRequest.newBuilder()
            .setResource(
                EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                    .toString())
            .setOptions(GetPolicyOptions.newBuilder().build())
            .build();
    Policy actualResponse = client.getIamPolicy(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetIamPolicyRequest actualRequest = ((GetIamPolicyRequest) actualRequests.get(0));
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getOptions(), actualRequest.getOptions());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error propagation for getIamPolicy.
  @Test
  public void getIamPolicyExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);
    try {
      GetIamPolicyRequest request =
          GetIamPolicyRequest.newBuilder()
              .setResource(
                  EndpointName.ofProjectLocationEndpointName(
                          "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                      .toString())
              .setOptions(GetPolicyOptions.newBuilder().build())
              .build();
      client.getIamPolicy(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
@Test
public void testIamPermissionsTest() throws Exception {
TestIamPermissionsResponse expectedResponse =
TestIamPermissionsResponse.newBuilder().addAllPermissions(new ArrayList<String>()).build();
mockIAMPolicy.addResponse(expectedResponse);
TestIamPermissionsRequest request =
TestIamPermissionsRequest.newBuilder()
.setResource(
EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
.toString())
.addAllPermissions(new ArrayList<String>())
.build();
TestIamPermissionsResponse actualResponse = client.testIamPermissions(request);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
Assert.assertEquals(1, actualRequests.size());
TestIamPermissionsRequest actualRequest = ((TestIamPermissionsRequest) actualRequests.get(0));
Assert.assertEquals(request.getResource(), actualRequest.getResource());
Assert.assertEquals(request.getPermissionsList(), actualRequest.getPermissionsList());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void testIamPermissionsExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockIAMPolicy.addException(exception);
try {
TestIamPermissionsRequest request =
TestIamPermissionsRequest.newBuilder()
.setResource(
EndpointName.ofProjectLocationEndpointName(
"[PROJECT]", "[LOCATION]", "[ENDPOINT]")
.toString())
.addAllPermissions(new ArrayList<String>())
.build();
client.testIamPermissions(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
}
|
googleapis/google-cloud-java | 38,174 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/SetIamPolicyBackendServiceRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for BackendServices.SetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.SetIamPolicyBackendServiceRequest}
*/
public final class SetIamPolicyBackendServiceRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.SetIamPolicyBackendServiceRequest)
SetIamPolicyBackendServiceRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use SetIamPolicyBackendServiceRequest.newBuilder() to construct.
private SetIamPolicyBackendServiceRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SetIamPolicyBackendServiceRequest() {
project_ = "";
resource_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SetIamPolicyBackendServiceRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicyBackendServiceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicyBackendServiceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest.class,
com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest.Builder.class);
}
private int bitField0_;
public static final int GLOBAL_SET_POLICY_REQUEST_RESOURCE_FIELD_NUMBER = 337048498;
private com.google.cloud.compute.v1.GlobalSetPolicyRequest globalSetPolicyRequestResource_;
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the globalSetPolicyRequestResource field is set.
*/
  @java.lang.Override
  public boolean hasGlobalSetPolicyRequestResource() {
    // Bit 0 of bitField0_ records explicit presence of the message field.
    return ((bitField0_ & 0x00000001) != 0);
  }
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The globalSetPolicyRequestResource.
*/
  @java.lang.Override
  public com.google.cloud.compute.v1.GlobalSetPolicyRequest getGlobalSetPolicyRequestResource() {
    // Never returns null: falls back to the default instance when the field is unset.
    return globalSetPolicyRequestResource_ == null
        ? com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()
        : globalSetPolicyRequestResource_;
  }
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
  @java.lang.Override
  public com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder
      getGlobalSetPolicyRequestResourceOrBuilder() {
    // Same fallback as the getter; on the immutable message this is the stored value.
    return globalSetPolicyRequestResource_ == null
        ? com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()
        : globalSetPolicyRequestResource_;
  }
public static final int PROJECT_FIELD_NUMBER = 227560217;
@SuppressWarnings("serial")
private volatile java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
  @java.lang.Override
  public java.lang.String getProject() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the UTF-8 ByteString once and
      // cache the resulting String back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      project_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getProjectBytes() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String to a ByteString once and keep the bytes form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      project_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int RESOURCE_FIELD_NUMBER = 195806222;
@SuppressWarnings("serial")
private volatile java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
  @java.lang.Override
  public java.lang.String getResource() {
    java.lang.Object ref = resource_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the UTF-8 ByteString once and
      // cache the resulting String back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      resource_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getResourceBytes() {
    java.lang.Object ref = resource_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String to a ByteString once and keep the bytes form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      resource_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message declares no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are written in ascending field-number order; empty strings are
    // omitted entirely (proto3 default-value elision).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
    }
    // The message field is only written when explicitly set (presence bit 0).
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(337048498, getGlobalSetPolicyRequestResource());
    }
    // Preserve any fields parsed from a newer schema version.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Return the cached size when it has already been computed (-1 = not cached).
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirrors writeTo(): each field contributes only when it would be written.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              337048498, getGlobalSetPolicyRequestResource());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest other =
        (com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest) obj;

    // For the optional message field, presence must match before values are compared.
    if (hasGlobalSetPolicyRequestResource() != other.hasGlobalSetPolicyRequestResource())
      return false;
    if (hasGlobalSetPolicyRequestResource()) {
      if (!getGlobalSetPolicyRequestResource().equals(other.getGlobalSetPolicyRequestResource()))
        return false;
    }
    if (!getProject().equals(other.getProject())) return false;
    if (!getResource().equals(other.getResource())) return false;
    // Unknown fields participate in equality as well.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Cache the hash; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // The optional message field is mixed in only when present,
    // keeping hashCode consistent with equals().
    if (hasGlobalSetPolicyRequestResource()) {
      hash = (37 * hash) + GLOBAL_SET_POLICY_REQUEST_RESOURCE_FIELD_NUMBER;
      hash = (53 * hash) + getGlobalSetPolicyRequestResource().hashCode();
    }
    hash = (37 * hash) + PROJECT_FIELD_NUMBER;
    hash = (53 * hash) + getProject().hashCode();
    hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
    hash = (53 * hash) + getResource().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Static parse entry points -------------------------------------------
  // Each overload delegates to the singleton PARSER; the stream-based variants
  // route through GeneratedMessageV3 helpers so IOExceptions propagate as-is.
  public static com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // --- Builder factories ----------------------------------------------------
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Fresh builder with all fields cleared.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Builder pre-populated from an existing message.
  public static Builder newBuilder(
      com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance maps to an empty builder; any other instance is copied in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* A request message for BackendServices.SetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.SetIamPolicyBackendServiceRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.SetIamPolicyBackendServiceRequest)
com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicyBackendServiceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicyBackendServiceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest.class,
com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest.Builder.class);
}
// Construct using com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getGlobalSetPolicyRequestResourceFieldBuilder();
}
}
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset all presence bits and restore every field to its default.
      bitField0_ = 0;
      globalSetPolicyRequestResource_ = null;
      if (globalSetPolicyRequestResourceBuilder_ != null) {
        // Release the nested builder so a stale sub-builder cannot leak state.
        globalSetPolicyRequestResourceBuilder_.dispose();
        globalSetPolicyRequestResourceBuilder_ = null;
      }
      project_ = "";
      resource_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicyBackendServiceRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest
getDefaultInstanceForType() {
return com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest build() {
com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest buildPartial() {
com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest result =
new com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies every field whose presence bit is set from this builder into
    // `result`, translating builder presence bits into the message's bits.
    private void buildPartial0(
        com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        // Prefer the nested builder's built value when one exists.
        result.globalSetPolicyRequestResource_ =
            globalSetPolicyRequestResourceBuilder_ == null
                ? globalSetPolicyRequestResource_
                : globalSetPolicyRequestResourceBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.project_ = project_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.resource_ = resource_;
      }
      // Only the message field's bit (0x1) survives into the built message.
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest) {
return mergeFrom((com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges `other` into this builder: set scalar fields in `other` overwrite
    // ours, and the message field is merged recursively. Default-valued fields
    // in `other` leave this builder untouched.
    public Builder mergeFrom(com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest other) {
      if (other
          == com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest.getDefaultInstance())
        return this;
      if (other.hasGlobalSetPolicyRequestResource()) {
        mergeGlobalSetPolicyRequestResource(other.getGlobalSetPolicyRequestResource());
      }
      if (!other.getProject().isEmpty()) {
        project_ = other.project_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getResource().isEmpty()) {
        resource_ = other.resource_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Wire-format tag = (field_number << 3) | wire_type; the case labels
          // below are the precomputed tags for this message's fields.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 1566449778:
              {
                // resource (field 195806222, length-delimited string).
                resource_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 1566449778
            case 1820481738:
              {
                // project (field 227560217, length-delimited string).
                project_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 1820481738
            case -1598579310:
              {
                // global_set_policy_request_resource (field 337048498, message);
                // the tag overflows into a negative int at this field number.
                input.readMessage(
                    getGlobalSetPolicyRequestResourceFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case -1598579310
            default:
              {
                // Unknown fields are preserved so round-tripping is lossless.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even when parsing aborts partway through.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private com.google.cloud.compute.v1.GlobalSetPolicyRequest globalSetPolicyRequestResource_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.GlobalSetPolicyRequest,
com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder,
com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder>
globalSetPolicyRequestResourceBuilder_;
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the globalSetPolicyRequestResource field is set.
*/
public boolean hasGlobalSetPolicyRequestResource() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The globalSetPolicyRequestResource.
*/
public com.google.cloud.compute.v1.GlobalSetPolicyRequest getGlobalSetPolicyRequestResource() {
if (globalSetPolicyRequestResourceBuilder_ == null) {
return globalSetPolicyRequestResource_ == null
? com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()
: globalSetPolicyRequestResource_;
} else {
return globalSetPolicyRequestResourceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGlobalSetPolicyRequestResource(
com.google.cloud.compute.v1.GlobalSetPolicyRequest value) {
if (globalSetPolicyRequestResourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
globalSetPolicyRequestResource_ = value;
} else {
globalSetPolicyRequestResourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGlobalSetPolicyRequestResource(
com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder builderForValue) {
if (globalSetPolicyRequestResourceBuilder_ == null) {
globalSetPolicyRequestResource_ = builderForValue.build();
} else {
globalSetPolicyRequestResourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Merges `value` into the current message field. If the field is already
    // set to a non-default value, the two messages are field-merged; otherwise
    // `value` simply replaces the current contents.
    public Builder mergeGlobalSetPolicyRequestResource(
        com.google.cloud.compute.v1.GlobalSetPolicyRequest value) {
      if (globalSetPolicyRequestResourceBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && globalSetPolicyRequestResource_ != null
            && globalSetPolicyRequestResource_
                != com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()) {
          getGlobalSetPolicyRequestResourceBuilder().mergeFrom(value);
        } else {
          globalSetPolicyRequestResource_ = value;
        }
      } else {
        // A live sub-builder owns the field; delegate the merge to it.
        globalSetPolicyRequestResourceBuilder_.mergeFrom(value);
      }
      if (globalSetPolicyRequestResource_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearGlobalSetPolicyRequestResource() {
bitField0_ = (bitField0_ & ~0x00000001);
globalSetPolicyRequestResource_ = null;
if (globalSetPolicyRequestResourceBuilder_ != null) {
globalSetPolicyRequestResourceBuilder_.dispose();
globalSetPolicyRequestResourceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder
getGlobalSetPolicyRequestResourceBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getGlobalSetPolicyRequestResourceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder
getGlobalSetPolicyRequestResourceOrBuilder() {
if (globalSetPolicyRequestResourceBuilder_ != null) {
return globalSetPolicyRequestResourceBuilder_.getMessageOrBuilder();
} else {
return globalSetPolicyRequestResource_ == null
? com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()
: globalSetPolicyRequestResource_;
}
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.GlobalSetPolicyRequest,
com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder,
com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder>
getGlobalSetPolicyRequestResourceFieldBuilder() {
if (globalSetPolicyRequestResourceBuilder_ == null) {
globalSetPolicyRequestResourceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.GlobalSetPolicyRequest,
com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder,
com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder>(
getGlobalSetPolicyRequestResource(), getParentForChildren(), isClean());
globalSetPolicyRequestResource_ = null;
}
return globalSetPolicyRequestResourceBuilder_;
}
private java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The project to set.
* @return This builder for chaining.
*/
public Builder setProject(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearProject() {
project_ = getDefaultInstance().getProject();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for project to set.
* @return This builder for chaining.
*/
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The resource to set.
* @return This builder for chaining.
*/
public Builder setResource(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
    // Resets "resource" to the message default and clears its has-bit.
    public Builder clearResource() {
      resource_ = getDefaultInstance().getResource();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for resource to set.
* @return This builder for chaining.
*/
    // Sets the required "resource" field from raw bytes, validating UTF-8 first.
    public Builder setResourceBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // reject non-UTF-8 input early
      resource_ = value;
      bitField0_ |= 0x00000004; // mark "resource" as explicitly set
      onChanged();
      return this;
    }
    // Standard GeneratedMessageV3.Builder plumbing for unrecognized fields;
    // both overrides simply defer to the superclass implementation.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.SetIamPolicyBackendServiceRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.SetIamPolicyBackendServiceRequest)
  // Shared singleton default (all-fields-unset) instance.
  private static final com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest();
  }

  public static com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser: delegates to Builder.mergeFrom and converts every
  // failure into an InvalidProtocolBufferException carrying the partial message.
  private static final com.google.protobuf.Parser<SetIamPolicyBackendServiceRequest> PARSER =
      new com.google.protobuf.AbstractParser<SetIamPolicyBackendServiceRequest>() {
        @java.lang.Override
        public SetIamPolicyBackendServiceRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<SetIamPolicyBackendServiceRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SetIamPolicyBackendServiceRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.SetIamPolicyBackendServiceRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/ranger | 37,818 | security-admin/src/main/java/org/apache/ranger/rest/PublicAPIsv2.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ranger.rest;
import org.apache.commons.lang.StringUtils;
import org.apache.ranger.admin.client.datatype.RESTResponse;
import org.apache.ranger.biz.SecurityZoneDBStore;
import org.apache.ranger.common.RESTErrorUtil;
import org.apache.ranger.common.annotation.RangerAnnotationJSMgrName;
import org.apache.ranger.plugin.model.RangerPluginInfo;
import org.apache.ranger.plugin.model.RangerPolicy;
import org.apache.ranger.plugin.model.RangerRole;
import org.apache.ranger.plugin.model.RangerSecurityZone;
import org.apache.ranger.plugin.model.RangerSecurityZoneHeaderInfo;
import org.apache.ranger.plugin.model.RangerSecurityZoneV2;
import org.apache.ranger.plugin.model.RangerSecurityZoneV2.RangerSecurityZoneChangeRequest;
import org.apache.ranger.plugin.model.RangerSecurityZoneV2.RangerSecurityZoneResource;
import org.apache.ranger.plugin.model.RangerService;
import org.apache.ranger.plugin.model.RangerServiceDef;
import org.apache.ranger.plugin.model.RangerServiceHeaderInfo;
import org.apache.ranger.plugin.model.RangerServiceResource;
import org.apache.ranger.plugin.model.RangerServiceTags;
import org.apache.ranger.plugin.store.PList;
import org.apache.ranger.plugin.util.GrantRevokeRoleRequest;
import org.apache.ranger.plugin.util.RangerPurgeResult;
import org.apache.ranger.plugin.util.ServiceTags;
import org.apache.ranger.security.context.RangerAPIList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@Path("public/v2")
@Component
@Scope("request")
@RangerAnnotationJSMgrName("PublicMgr")
@Transactional(propagation = Propagation.REQUIRES_NEW)
public class PublicAPIsv2 {
private static final Logger logger = LoggerFactory.getLogger(PublicAPIsv2.class);
@Autowired
ServiceREST serviceREST;
@Autowired
TagREST tagREST;
@Autowired
SecurityZoneREST securityZoneRest;
@Autowired
RoleREST roleREST;
@Autowired
RESTErrorUtil restErrorUtil;
@Autowired
SecurityZoneDBStore securityZoneStore;
/*
* SecurityZone Creation API
*/
    // Creates a security zone; delegates to SecurityZoneREST.
    @POST
    @Path("/api/zones")
    @Consumes("application/json")
    @Produces("application/json")
    public RangerSecurityZone createSecurityZone(RangerSecurityZone securityZone) {
        return securityZoneRest.createSecurityZone(securityZone);
    }
    /*
     * SecurityZone Manipulation API
     */
    // Replaces the zone identified by {id} with the given definition.
    @PUT
    @Path("/api/zones/{id}")
    @Consumes("application/json")
    @Produces("application/json")
    public RangerSecurityZone updateSecurityZone(@PathParam("id") Long zoneId, RangerSecurityZone securityZone) {
        return securityZoneRest.updateSecurityZone(zoneId, securityZone);
    }
    // Deletes a zone by its unique name.
    @DELETE
    @Path("/api/zones/name/{name}")
    public void deleteSecurityZone(@PathParam("name") String zoneName) {
        securityZoneRest.deleteSecurityZone(zoneName);
    }
    // Deletes a zone by its numeric id.
    @DELETE
    @Path("/api/zones/{id}")
    public void deleteSecurityZone(@PathParam("id") Long zoneId) {
        securityZoneRest.deleteSecurityZone(zoneId);
    }
    /*
     * API's to Access SecurityZones
     */
    // Looks up a zone by name.
    @GET
    @Path("/api/zones/name/{name}")
    @Produces("application/json")
    public RangerSecurityZone getSecurityZone(@PathParam("name") String zoneName) {
        return securityZoneRest.getSecurityZone(zoneName);
    }
    // Looks up a zone by id.
    @GET
    @Path("/api/zones/{id}")
    @Produces("application/json")
    public RangerSecurityZone getSecurityZone(@PathParam("id") Long id) {
        return securityZoneRest.getSecurityZone(id);
    }
    // Lists all zones visible to the caller; filtering is driven by request params.
    @GET
    @Path("/api/zones")
    @Produces("application/json")
    public List<RangerSecurityZone> getAllZones(@Context HttpServletRequest request) {
        return securityZoneRest.getAllZones(request).getSecurityZones();
    }
/**
* Get {@link List} of security zone header info.
* This API is authorized to every authenticated user.
*
* @return {@link List} of {@link RangerSecurityZoneHeaderInfo} if present.
*/
@GET
@Path("/api/zone-headers")
@Produces("application/json")
public List<RangerSecurityZoneHeaderInfo> getSecurityZoneHeaderInfoList(@Context HttpServletRequest request) {
logger.debug("==> PublicAPIsv2.getSecurityZoneHeaderInfoList()");
List<RangerSecurityZoneHeaderInfo> ret;
try {
ret = securityZoneStore.getSecurityZoneHeaderInfoList(request);
} catch (WebApplicationException excp) {
throw excp;
} catch (Throwable excp) {
logger.error("PublicAPIsv2.getSecurityZoneHeaderInfoList() failed", excp);
throw restErrorUtil.createRESTException(excp.getMessage());
}
logger.debug("<== PublicAPIsv2.getSecurityZoneHeaderInfoList():{}", ret);
return ret;
}
/**
* Get {@link List} of security zone header info.
* This API is authorized to every authenticated user.
*
* @param serviceId
* @param isTagService
* @return {@link List} of {@link RangerSecurityZoneHeaderInfo} if present.
*/
    // Zone headers applicable to the given service (optionally treated as a tag service).
    @GET
    @Path("/api/zones/zone-headers/for-service/{serviceId}")
    @Produces("application/json")
    public List<RangerSecurityZoneHeaderInfo> getSecurityZoneHeaderInfoListByServiceId(@PathParam("serviceId") Long serviceId, @DefaultValue("false") @QueryParam("isTagService") Boolean isTagService, @Context HttpServletRequest request) {
        return securityZoneRest.getSecurityZoneHeaderInfoListByServiceId(serviceId, isTagService, request);
    }
    /**
     * Get service header info {@link List} for given zone.
     * This API is authorized to every authenticated user.
     *
     * @param zoneId id of the zone whose associated services are listed
     * @return {@link List} of {@link RangerServiceHeaderInfo} for given zone if present.
     */
    @GET
    @Path("/api/zones/{zoneId}/service-headers")
    @Produces("application/json")
    public List<RangerServiceHeaderInfo> getServiceHeaderInfoListByZoneId(@PathParam("zoneId") Long zoneId, @Context HttpServletRequest request) {
        logger.debug("==> PublicAPIsv2.getServiceHeaderInfoListByZoneId({})", zoneId);
        List<RangerServiceHeaderInfo> ret;
        try {
            ret = securityZoneStore.getServiceHeaderInfoListByZoneId(zoneId, request);
        } catch (WebApplicationException excp) {
            // Preserve REST-layer exceptions as-is.
            throw excp;
        } catch (Throwable excp) {
            // Wrap everything else into a generic REST error.
            logger.error("PublicAPIsv2.getServiceHeaderInfoListByZoneId() failed", excp);
            throw restErrorUtil.createRESTException(excp.getMessage());
        }
        logger.debug("<== PublicAPIsv2.getServiceHeaderInfoListByZoneId():{}", ret);
        return ret;
    }
    // Names of zones matching a resource of the given service; SPNEGO-accessible.
    @GET
    @Path("/api/zone-names/{serviceName}/resource")
    @Produces("application/json")
    @PreAuthorize("@rangerPreAuthSecurityHandler.isAPISpnegoAccessible()")
    public Collection<String> getSecurityZoneNamesForResource(@PathParam("serviceName") String serviceName, @Context HttpServletRequest request) {
        return securityZoneRest.getZoneNamesForResource(serviceName, request);
    }
    // Creates a security zone using the v2 model.
    @POST
    @Path("/api/zones-v2")
    @Consumes("application/json")
    @Produces("application/json")
    public RangerSecurityZoneV2 createSecurityZone(RangerSecurityZoneV2 securityZone) {
        return securityZoneRest.createSecurityZone(securityZone);
    }
    // Full replacement of a v2 zone by id.
    @PUT
    @Path("/api/zones-v2/{id}")
    @Consumes("application/json")
    @Produces("application/json")
    public RangerSecurityZoneV2 updateSecurityZone(@PathParam("id") Long zoneId, RangerSecurityZoneV2 securityZone) {
        return securityZoneRest.updateSecurityZone(zoneId, securityZone);
    }
    // Partial (incremental) update of a v2 zone via a change request.
    @PUT
    @Path("/api/zones-v2/{id}/partial")
    @Consumes("application/json")
    @Produces("application/json")
    public Boolean updateSecurityZone(@PathParam("id") Long zoneId, RangerSecurityZoneChangeRequest changeRequest) {
        return securityZoneRest.updateSecurityZone(zoneId, changeRequest);
    }
    // Looks up a v2 zone by name.
    @GET
    @Path("/api/zones-v2/name/{name}")
    @Produces("application/json")
    public RangerSecurityZoneV2 getSecurityZoneV2(@PathParam("name") String zoneName) {
        return securityZoneRest.getSecurityZoneV2(zoneName);
    }
    // Looks up a v2 zone by id.
    @GET
    @Path("/api/zones-v2/{id}")
    @Produces("application/json")
    public RangerSecurityZoneV2 getSecurityZoneV2(@PathParam("id") Long zoneId) {
        return securityZoneRest.getSecurityZoneV2(zoneId);
    }
    // Paged list of resources mapped to the given service within a zone (by zone id).
    @GET
    @Path("/api/zones-v2/{id}/resources/{serviceName}")
    @Produces("application/json")
    public PList<RangerSecurityZoneResource> getResources(@PathParam("id") Long zoneId, @PathParam("serviceName") String serviceName, @Context HttpServletRequest request) {
        return securityZoneRest.getResources(zoneId, serviceName, request);
    }
    // Paged list of resources mapped to the given service within a zone (by zone name).
    @GET
    @Path("/api/zones-v2/name/{name}/resources/{serviceName}")
    @Produces("application/json")
    public PList<RangerSecurityZoneResource> getResources(@PathParam("name") String zoneName, @PathParam("serviceName") String serviceName, @Context HttpServletRequest request) {
        return securityZoneRest.getResources(zoneName, serviceName, request);
    }
    // Paged list of all v2 zones visible to the caller.
    @GET
    @Path("/api/zones-v2")
    @Produces("application/json")
    public PList<RangerSecurityZoneV2> getAllZonesV2(@Context HttpServletRequest request) {
        return securityZoneRest.getAllZonesV2(request);
    }
/*
* ServiceDef Manipulation APIs
*/
    // Looks up a service definition by id.
    @GET
    @Path("/api/servicedef/{id}")
    @Produces("application/json")
    public RangerServiceDef getServiceDef(@PathParam("id") Long id) {
        return serviceREST.getServiceDef(id);
    }
    // Looks up a service definition by name.
    @GET
    @Path("/api/servicedef/name/{name}")
    @Produces("application/json")
    public RangerServiceDef getServiceDefByName(@PathParam("name") String name) {
        return serviceREST.getServiceDefByName(name);
    }
    // Searches service definitions; filters come from request params.
    @GET
    @Path("/api/servicedef/")
    @Produces("application/json")
    public List<RangerServiceDef> searchServiceDefs(@Context HttpServletRequest request) {
        return serviceREST.getServiceDefs(request).getServiceDefs();
    }
    // Creates a service definition; admin-only.
    @POST
    @Path("/api/servicedef/")
    @PreAuthorize("hasRole('ROLE_SYS_ADMIN')")
    @Consumes("application/json")
    @Produces("application/json")
    public RangerServiceDef createServiceDef(RangerServiceDef serviceDef) {
        return serviceREST.createServiceDef(serviceDef);
    }
@PUT
@Path("/api/servicedef/{id}")
@PreAuthorize("hasRole('ROLE_SYS_ADMIN')")
@Consumes("application/json")
@Produces("application/json")
public RangerServiceDef updateServiceDef(RangerServiceDef serviceDef, @PathParam("id") Long id) {
// if serviceDef.id is specified, it should be same as param 'id'
if (serviceDef.getId() == null) {
serviceDef.setId(id);
} else if (!serviceDef.getId().equals(id)) {
throw restErrorUtil.createRESTException(HttpServletResponse.SC_BAD_REQUEST, "serviceDef id mismatch", true);
}
return serviceREST.updateServiceDef(serviceDef, serviceDef.getId());
}
    // Updates a service definition addressed by its (immutable) name.
    @PUT
    @Path("/api/servicedef/name/{name}")
    @PreAuthorize("hasRole('ROLE_SYS_ADMIN')")
    @Consumes("application/json")
    @Produces("application/json")
    public RangerServiceDef updateServiceDefByName(RangerServiceDef serviceDef, @PathParam("name") String name) {
        // serviceDef.name is immutable
        // if serviceDef.name is specified, it should be same as the param 'name'
        if (serviceDef.getName() == null) {
            serviceDef.setName(name);
        } else if (!serviceDef.getName().equals(name)) {
            throw restErrorUtil.createRESTException(HttpServletResponse.SC_BAD_REQUEST, "serviceDef name mismatch", true);
        }
        // ignore serviceDef.id - if specified. Retrieve using the given name and use id from the retrieved object
        RangerServiceDef existingServiceDef = getServiceDefByName(name);
        serviceDef.setId(existingServiceDef.getId());
        if (StringUtils.isEmpty(serviceDef.getGuid())) {
            // carry forward the stored guid when the caller did not supply one
            serviceDef.setGuid(existingServiceDef.getGuid());
        }
        return serviceREST.updateServiceDef(serviceDef, serviceDef.getId());
    }
/*
* Should add this back when guid is used for search and delete operations as well
@PUT
@Path("/api/servicedef/guid/{guid}")
@PreAuthorize("hasRole('ROLE_SYS_ADMIN')")
@Produces({ "application/json" })
public RangerServiceDef updateServiceDefByGuid(RangerServiceDef serviceDef, @PathParam("guid") String guid) {
// ignore serviceDef.id - if specified. Retrieve using the given guid and use id from the retrieved object
RangerServiceDef existingServiceDef = getServiceDefByGuid(guid);
serviceDef.setId(existingServiceDef.getId());
if(StringUtils.isEmpty(serviceDef.getGuid())) {
serviceDef.setGuid(existingServiceDef.getGuid());
}
return serviceREST.updateServiceDef(serviceDef);
}
*/
    // Deletes a service definition by id; admin-only.
    @DELETE
    @Path("/api/servicedef/{id}")
    @PreAuthorize("hasRole('ROLE_SYS_ADMIN')")
    public void deleteServiceDef(@PathParam("id") Long id, @Context HttpServletRequest request) {
        serviceREST.deleteServiceDef(id, request);
    }
    // Deletes a service definition by name: resolve to id first, then delete.
    @DELETE
    @Path("/api/servicedef/name/{name}")
    @PreAuthorize("hasRole('ROLE_SYS_ADMIN')")
    public void deleteServiceDefByName(@PathParam("name") String name, @Context HttpServletRequest request) {
        RangerServiceDef serviceDef = serviceREST.getServiceDefByName(name);
        serviceREST.deleteServiceDef(serviceDef.getId(), request);
    }
/*
* Service Manipulation APIs
*/
    // Looks up a service by id; SPNEGO-accessible.
    @GET
    @Path("/api/service/{id}")
    @Produces("application/json")
    @PreAuthorize("@rangerPreAuthSecurityHandler.isAPISpnegoAccessible()")
    public RangerService getService(@PathParam("id") Long id) {
        return serviceREST.getService(id);
    }
    // Looks up a service by name; SPNEGO-accessible.
    @GET
    @Path("/api/service/name/{name}")
    @Produces("application/json")
    @PreAuthorize("@rangerPreAuthSecurityHandler.isAPISpnegoAccessible()")
    public RangerService getServiceByName(@PathParam("name") String name) {
        return serviceREST.getServiceByName(name);
    }
    // Searches services; filters come from request params.
    @GET
    @Path("/api/service/")
    @Produces("application/json")
    @PreAuthorize("@rangerPreAuthSecurityHandler.isAPISpnegoAccessible()")
    public List<RangerService> searchServices(@Context HttpServletRequest request) {
        return serviceREST.getServices(request).getServices();
    }
    // Lightweight service headers (id/name summaries).
    @GET
    @Path("/api/service-headers")
    @Produces("application/json")
    @PreAuthorize("@rangerPreAuthSecurityHandler.isAPIAccessible(\"" + RangerAPIList.GET_SERVICE_HEADERS + "\")")
    public List<RangerServiceHeaderInfo> getServiceHeaders(@Context HttpServletRequest request) {
        return serviceREST.getServiceHeaders(request);
    }
    // Creates a service; SPNEGO-accessible.
    @POST
    @Path("/api/service/")
    @PreAuthorize("@rangerPreAuthSecurityHandler.isAPISpnegoAccessible()")
    @Consumes("application/json")
    @Produces("application/json")
    public RangerService createService(RangerService service) {
        return serviceREST.createService(service);
    }
@PUT
@Path("/api/service/{id}")
@PreAuthorize("@rangerPreAuthSecurityHandler.isAPISpnegoAccessible()")
@Consumes("application/json")
@Produces("application/json")
public RangerService updateService(RangerService service, @PathParam("id") Long id, @Context HttpServletRequest request) {
// if service.id is specified, it should be same as the param 'id'
if (service.getId() == null) {
service.setId(id);
} else if (!service.getId().equals(id)) {
throw restErrorUtil.createRESTException(HttpServletResponse.SC_BAD_REQUEST, "service id mismatch", true);
}
return serviceREST.updateService(service, request);
}
    // Updates a service addressed by name; body id is ignored in favor of the stored one.
    @PUT
    @Path("/api/service/name/{name}")
    @PreAuthorize("@rangerPreAuthSecurityHandler.isAPISpnegoAccessible()")
    @Consumes("application/json")
    @Produces("application/json")
    public RangerService updateServiceByName(RangerService service, @PathParam("name") String name, @Context HttpServletRequest request) {
        // ignore service.id - if specified. Retrieve using the given name and use id from the retrieved object
        RangerService existingService = getServiceByName(name);
        service.setId(existingService.getId());
        if (StringUtils.isEmpty(service.getGuid())) {
            // carry forward the stored guid when the caller did not supply one
            service.setGuid(existingService.getGuid());
        }
        if (StringUtils.isEmpty(service.getName())) {
            // fill in the name from the stored service when the body omits it
            service.setName(existingService.getName());
        }
        return serviceREST.updateService(service, request);
    }
/*
* Should add this back when guid is used for search and delete operations as well
@PUT
@Path("/api/service/guid/{guid}")
@PreAuthorize("hasRole('ROLE_SYS_ADMIN')")
@Produces({ "application/json" })
public RangerService updateServiceByGuid(RangerService service, @PathParam("guid") String guid) {
// ignore service.id - if specified. Retrieve using the given guid and use id from the retrieved object
RangerService existingService = getServiceByGuid(guid);
service.setId(existingService.getId());
if(StringUtils.isEmpty(service.getGuid())) {
service.setGuid(existingService.getGuid());
}
return serviceREST.updateService(service);
}
*/
    // Deletes a service by id; SPNEGO-accessible.
    @DELETE
    @Path("/api/service/{id}")
    @PreAuthorize("@rangerPreAuthSecurityHandler.isAPISpnegoAccessible()")
    public void deleteService(@PathParam("id") Long id) {
        serviceREST.deleteService(id);
    }
    // Deletes a service by name: resolve to id first, then delete.
    @DELETE
    @Path("/api/service/name/{name}")
    @PreAuthorize("@rangerPreAuthSecurityHandler.isAPISpnegoAccessible()")
    public void deleteServiceByName(@PathParam("name") String name) {
        RangerService service = serviceREST.getServiceByName(name);
        serviceREST.deleteService(service.getId());
    }
/*
* Policy Manipulation APIs
*/
    // Looks up a policy by id.
    @GET
    @Path("/api/policy/{id}")
    @Produces("application/json")
    public RangerPolicy getPolicy(@PathParam("id") Long id) {
        return serviceREST.getPolicy(id);
    }
@GET
@Path("/api/policy/")
@Produces("application/json")
public List<RangerPolicy> getPolicies(@Context HttpServletRequest request) {
logger.debug("==> PublicAPIsv2.getPolicies()");
List<RangerPolicy> ret = serviceREST.getPolicies(request).getPolicies();
boolean includeMetaAttributes = Boolean.parseBoolean(request.getParameter("includeMetaAttributes"));
if (includeMetaAttributes) {
ret = serviceREST.getPoliciesWithMetaAttributes(ret);
}
logger.debug("<== PublicAPIsv2.getPolicies(Request: {} Result Size: {}", request.getQueryString(), ret.size());
return ret;
}
    // Looks up a policy by service + policy name, optionally scoped to a zone;
    // 404s when no such policy exists.
    @GET
    @Path("/api/service/{servicename}/policy/{policyname}")
    @Produces("application/json")
    public RangerPolicy getPolicyByName(@PathParam("servicename") String serviceName, @PathParam("policyname") String policyName, @QueryParam("zoneName") String zoneName, @Context HttpServletRequest request) {
        logger.debug("==> PublicAPIsv2.getPolicyByName({}, {}, {})", serviceName, policyName, zoneName);
        RangerPolicy policy = serviceREST.getPolicyByName(serviceName, policyName, zoneName);
        if (policy == null) {
            throw restErrorUtil.createRESTException(HttpServletResponse.SC_NOT_FOUND, "Not found", true);
        }
        logger.debug("<== PublicAPIsv2.getPolicyByName({}, {}, {}): {}", serviceName, policyName, zoneName, policy);
        return policy;
    }
    // Searches the policies of a given service; filters come from request params.
    @GET
    @Path("/api/service/{servicename}/policy/")
    @Produces("application/json")
    public List<RangerPolicy> searchPolicies(@PathParam("servicename") String serviceName, @Context HttpServletRequest request) {
        return serviceREST.getServicePoliciesByName(serviceName, request).getPolicies();
    }
    // Policies of a service-def type that match a resource described by request params.
    @GET
    @Path("/api/policies/{serviceDefName}/for-resource/")
    @Produces("application/json")
    public List<RangerPolicy> getPoliciesForResource(@PathParam("serviceDefName") String serviceDefName, @DefaultValue("") @QueryParam("serviceName") String serviceName, @Context HttpServletRequest request) {
        return serviceREST.getPoliciesForResource(serviceDefName, serviceName, request);
    }
    // Looks up a policy by guid, optionally narrowed by service and zone.
    // NOTE(review): query param is spelled "ZoneName" (capital Z) here while the
    // DELETE counterpart uses "zoneName" — confirm whether this is intentional.
    @GET
    @Path("/api/policy/guid/{guid}")
    @Produces("application/json")
    public RangerPolicy getPolicyByGUIDAndServiceNameAndZoneName(@PathParam("guid") String guid, @DefaultValue("") @QueryParam("serviceName") String serviceName, @DefaultValue("") @QueryParam("ZoneName") String zoneName) {
        logger.debug("==> PublicAPIsv2.getPolicyByGUIDAndServiceNameAndZoneName({}, {}, {})", guid, serviceName, zoneName);
        RangerPolicy rangerPolicy = serviceREST.getPolicyByGUIDAndServiceNameAndZoneName(guid, serviceName, zoneName);
        logger.debug("<== PublicAPIsv2.getPolicyByGUIDAndServiceNameAndZoneName({}, {}, {})", guid, serviceName, zoneName);
        return rangerPolicy;
    }
    // Creates a policy.
    @POST
    @Path("/api/policy/")
    @Consumes("application/json")
    @Produces("application/json")
    public RangerPolicy createPolicy(RangerPolicy policy, @Context HttpServletRequest request) {
        return serviceREST.createPolicy(policy, request);
    }
    // Create-or-update (add-or-merge) semantics for a policy.
    @POST
    @Path("/api/policy/apply/")
    @Consumes("application/json")
    @Produces("application/json")
    public RangerPolicy applyPolicy(RangerPolicy policy, @Context HttpServletRequest request) { // new API
        return serviceREST.applyPolicy(policy, request);
    }
@PUT
@Path("/api/policy/{id}")
@Consumes("application/json")
@Produces("application/json")
public RangerPolicy updatePolicy(RangerPolicy policy, @PathParam("id") Long id) {
// if policy.id is specified, it should be same as the param 'id'
if (policy.getId() == null) {
policy.setId(id);
} else if (!policy.getId().equals(id)) {
throw restErrorUtil.createRESTException(HttpServletResponse.SC_BAD_REQUEST, "policyID mismatch", true);
}
return serviceREST.updatePolicy(policy, id);
}
    // Updates a policy addressed by service + policy name (optionally zone-scoped);
    // the body's service must match the path, and the stored id/guid/name win.
    @PUT
    @Path("/api/service/{servicename}/policy/{policyname}")
    @Consumes("application/json")
    @Produces("application/json")
    public RangerPolicy updatePolicyByName(RangerPolicy policy, @PathParam("servicename") String serviceName, @PathParam("policyname") String policyName, @QueryParam("zoneName") String zoneName, @Context HttpServletRequest request) {
        if (policy.getService() == null || !policy.getService().equals(serviceName)) {
            throw restErrorUtil.createRESTException(HttpServletResponse.SC_BAD_REQUEST, "service name mismatch", true);
        }
        RangerPolicy oldPolicy = getPolicyByName(serviceName, policyName, zoneName, request);
        // ignore policy.id - if specified. Retrieve using the given serviceName+policyName and use id from the retrieved object
        policy.setId(oldPolicy.getId());
        if (StringUtils.isEmpty(policy.getGuid())) {
            policy.setGuid(oldPolicy.getGuid());
        }
        if (StringUtils.isEmpty(policy.getName())) {
            policy.setName(StringUtils.trim(oldPolicy.getName()));
        }
        return serviceREST.updatePolicy(policy, policy.getId());
    }
/* Should add this back when guid is used for search and delete operations as well
@PUT
@Path("/api/policy/guid/{guid}")
@Produces({ "application/json" })
public RangerPolicy updatePolicyByGuid(RangerPolicy policy, @PathParam("guid") String guid) {
// ignore policy.guid - if specified. Retrieve using the given guid and use id from the retrieved object
RangerPolicy existingPolicy = getPolicyByGuid(name);
policy.setId(existingPolicy.getId());
if(StringUtils.isEmpty(policy.getGuid())) {
policy.setGuid(existingPolicy.getGuid());
}
return serviceREST.updatePolicy(policy);
}
*/
    // Deletes a policy by id.
    @DELETE
    @Path("/api/policy/{id}")
    public void deletePolicy(@PathParam("id") Long id) {
        serviceREST.deletePolicy(id);
    }
    // Deletes a policy by service + policy name (optionally zone-scoped);
    // both names are required.
    @DELETE
    @Path("/api/policy")
    public void deletePolicyByName(@QueryParam("servicename") String serviceName, @QueryParam("policyname") String policyName, @QueryParam("zoneName") String zoneName, @Context HttpServletRequest request) {
        logger.debug("==> PublicAPIsv2.deletePolicyByName({}, {})", serviceName, policyName);
        if (serviceName == null || policyName == null) {
            throw restErrorUtil.createRESTException(HttpServletResponse.SC_BAD_REQUEST, "Invalid service name or policy name", true);
        }
        RangerPolicy policy = getPolicyByName(serviceName, policyName, zoneName, request);
        serviceREST.deletePolicy(policy.getId());
        logger.debug("<== PublicAPIsv2.deletePolicyByName({}, {})", serviceName, policyName);
    }
    // Bulk-deletes policies of a service; returns the sorted ids that were removed.
    @DELETE
    @Path("/api/policies/bulk")
    @Produces("application/json")
    public List<Long> deletePolicies(@Context HttpServletRequest request, @QueryParam("serviceName") String serviceName) {
        logger.debug("==> PublicAPIsv2.deletePolicies()");
        if (StringUtils.isBlank(serviceName)) {
            throw restErrorUtil.createRESTException(HttpServletResponse.SC_BAD_REQUEST, "Invalid service name", true);
        }
        List<Long> ret = serviceREST.deleteBulkPolicies(serviceName, request);
        Collections.sort(ret);
        logger.debug("<== PublicAPIsv2.deletePolicies()");
        return ret;
    }
    // Deletes a policy by guid, optionally narrowed by service and zone.
    @DELETE
    @Path("/api/policy/guid/{guid}")
    public void deletePolicyByGUIDAndServiceNameAndZoneName(@PathParam("guid") String guid, @DefaultValue("") @QueryParam("serviceName") String serviceName, @DefaultValue("") @QueryParam("zoneName") String zoneName) {
        logger.debug("==> PublicAPIsv2.deletePolicyByGUIDAndServiceNameAndZoneName({}, {}, {})", guid, serviceName, zoneName);
        serviceREST.deletePolicyByGUIDAndServiceNameAndZoneName(guid, serviceName, zoneName);
        logger.debug("<== PublicAPIsv2.deletePolicyByGUIDAndServiceNameAndZoneName({}, {}, {})", guid, serviceName, zoneName);
    }
    // Imports the given tags into a service; the path's serviceName overrides any
    // service names present in the payload. Admin-only.
    @PUT
    @Path("/api/service/{serviceName}/tags")
    @Consumes("application/json")
    @Produces("application/json")
    @PreAuthorize("hasRole('ROLE_SYS_ADMIN')")
    public void importServiceTags(@PathParam("serviceName") String serviceName, RangerServiceTags svcTags) {
        logger.debug("==> PublicAPIsv2.importServiceTags()");
        // overwrite serviceName with the one given in url
        if (svcTags.getServiceResources() != null) {
            for (RangerServiceResource svcResource : svcTags.getServiceResources()) {
                svcResource.setServiceName(serviceName);
            }
        }
        ServiceTags serviceTags = RangerServiceTags.toServiceTags(svcTags);
        // overwrite serviceName with the one given in url
        serviceTags.setServiceName(serviceName);
        tagREST.importServiceTags(serviceTags);
        logger.debug("<== PublicAPIsv2.importServiceTags()");
    }
@GET
@Path("/api/service/{serviceName}/tags")
@Produces("application/json")
@PreAuthorize("hasRole('ROLE_SYS_ADMIN')")
public RangerServiceTags getServiceTags(@PathParam("serviceName") String serviceName, @Context HttpServletRequest request) {
logger.debug("==> PublicAPIsv2.getServiceTags()");
Long lastKnownVersion = -1L;
Long lastActivationTime = 0L;
String pluginId = null;
Boolean supportsTagDeltas = false;
String pluginCapabilities = "";
ServiceTags tags = tagREST.getServiceTagsIfUpdated(serviceName, lastKnownVersion, lastActivationTime, pluginId, supportsTagDeltas, pluginCapabilities, request);
RangerServiceTags ret = RangerServiceTags.toRangerServiceTags(tags);
logger.debug("<== PublicAPIsv2.getServiceTags()");
return ret;
}
    // Info about plugins that have synced with this admin (versions, activation times).
    @GET
    @Path("/api/plugins/info")
    @Produces("application/json")
    public List<RangerPluginInfo> getPluginsInfo(@Context HttpServletRequest request) {
        logger.debug("==> PublicAPIsv2.getPluginsInfo()");
        List<RangerPluginInfo> ret = serviceREST.getPluginsInfo(request).getPluginInfoList();
        logger.debug("<== PublicAPIsv2.getPluginsInfo()");
        return ret;
    }
    // Purges policy-delta records older than the given number of days (default 7). Admin-only.
    @DELETE
    @Path("/api/server/policydeltas")
    @PreAuthorize("hasRole('ROLE_SYS_ADMIN')")
    public void deletePolicyDeltas(@DefaultValue("7") @QueryParam("days") Integer olderThan, @Context HttpServletRequest request) {
        logger.debug("==> PublicAPIsv2.deletePolicyDeltas({})", olderThan);
        serviceREST.deletePolicyDeltas(olderThan, request);
        logger.debug("<== PublicAPIsv2.deletePolicyDeltas({})", olderThan);
    }
    // Purges tag-delta records older than the given number of days (default 7). Admin-only.
    @DELETE
    @Path("/api/server/tagdeltas")
    @PreAuthorize("hasRole('ROLE_SYS_ADMIN')")
    public void deleteTagDeltas(@DefaultValue("7") @QueryParam("days") Integer olderThan, @Context HttpServletRequest request) {
        logger.debug("==> PublicAPIsv2.deleteTagDeltas({})", olderThan);
        tagREST.deleteTagDeltas(olderThan, request);
        logger.debug("<== PublicAPIsv2.deleteTagDeltas({})", olderThan);
    }
    // Removes empty policies from the named service. Admin-only.
    @DELETE
    @Path("/api/server/purgepolicies/{serviceName}")
    @PreAuthorize("hasRole('ROLE_SYS_ADMIN')")
    public void purgeEmptyPolicies(@PathParam("serviceName") String serviceName, @Context HttpServletRequest request) {
        logger.debug("==> PublicAPIsv2.purgeEmptyPolicies({})", serviceName);
        if (serviceName == null) {
            throw restErrorUtil.createRESTException(HttpServletResponse.SC_BAD_REQUEST, "Invalid service name", true);
        }
        serviceREST.purgeEmptyPolicies(serviceName, request);
        logger.debug("<== PublicAPIsv2.purgeEmptyPolicies({})", serviceName);
    }
/*
* Role Creation API
*/
@POST
@Path("/api/roles")
@Consumes("application/json")
@Produces("application/json")
public RangerRole createRole(@QueryParam("serviceName") String serviceName, RangerRole role, @DefaultValue("false") @QueryParam("createNonExistUserGroup") Boolean createNonExistUserGroup, @Context HttpServletRequest request) {
logger.info("==> PublicAPIsv2.createRole");
RangerRole ret = roleREST.createRole(serviceName, role, createNonExistUserGroup);
logger.info("<== PublicAPIsv2.createRole({})", ret.getName());
return ret;
}
/*
* Role Manipulation API
*/
@PUT
@Path("/api/roles/{id}")
@Consumes("application/json")
@Produces("application/json")
public RangerRole updateRole(@PathParam("id") Long roleId, RangerRole role, @DefaultValue("false") @QueryParam("createNonExistUserGroup") Boolean createNonExistUserGroup, @Context HttpServletRequest request) {
return roleREST.updateRole(roleId, role, createNonExistUserGroup);
}
/**
 * Deletes a role by name, evaluated in the context of the given service and executing user.
 *
 * @param serviceName service context used for authorization of the delete
 * @param userName    user on whose behalf the operation executes
 * @param roleName    name of the role to delete
 * @param request     the HTTP request
 */
@DELETE
@Path("/api/roles/name/{name}")
public void deleteRole(@QueryParam("serviceName") String serviceName, @QueryParam("execUser") String userName, @PathParam("name") String roleName, @Context HttpServletRequest request) {
    roleREST.deleteRole(serviceName, userName, roleName);
}
/**
 * Deletes a role by its numeric id.
 *
 * @param roleId  id of the role to delete
 * @param request the HTTP request
 */
@DELETE
@Path("/api/roles/{id}")
public void deleteRole(@PathParam("id") Long roleId, @Context HttpServletRequest request) {
    roleREST.deleteRole(roleId);
}
/*
* APIs to Access Roles
*/
/**
 * Looks up a role by name, evaluated in the context of the given service and executing user.
 *
 * @param serviceName service context used for authorization of the lookup
 * @param userName    user on whose behalf the lookup executes
 * @param roleName    name of the role to fetch
 * @param request     the HTTP request
 * @return the matching role
 */
@GET
@Path("/api/roles/name/{name}")
@Produces("application/json")
public RangerRole getRole(@QueryParam("serviceName") String serviceName, @QueryParam("execUser") String userName, @PathParam("name") String roleName, @Context HttpServletRequest request) {
    return roleREST.getRole(serviceName, userName, roleName);
}
/**
 * Looks up a role by its numeric id.
 *
 * @param id      id of the role to fetch
 * @param request the HTTP request
 * @return the matching role
 */
@GET
@Path("/api/roles/{id}")
@Produces("application/json")
public RangerRole getRole(@PathParam("id") Long id, @Context HttpServletRequest request) {
    return roleREST.getRole(id);
}
/**
 * Returns all roles visible to the caller.
 *
 * @param request the HTTP request (carries search/paging parameters consumed by roleREST)
 * @return list of roles unwrapped from the paged result
 */
@GET
@Path("/api/roles")
@Produces("application/json")
public List<RangerRole> getAllRoles(@Context HttpServletRequest request) {
    return roleREST.getAllRoles(request).getSecurityRoles();
}
/**
 * Returns the names of all roles visible in the given service / executing-user context.
 *
 * @param serviceName service context used for authorization of the lookup
 * @param userName    user on whose behalf the lookup executes
 * @param request     the HTTP request
 * @return list of role names
 */
@GET
@Path("/api/roles/names")
@Produces("application/json")
public List<String> getAllRoleNames(@QueryParam("serviceName") String serviceName, @QueryParam("execUser") String userName, @Context HttpServletRequest request) {
    return roleREST.getAllRoleNames(serviceName, userName, request);
}
/**
 * Returns the names of the roles the given user belongs to.
 *
 * @param userName user whose role memberships are returned
 * @param request  the HTTP request
 * @return list of role names for the user
 */
@GET
@Path("/api/roles/user/{user}")
@Produces("application/json")
public List<String> getUserRoles(@PathParam("user") String userName, @Context HttpServletRequest request) {
    return roleREST.getUserRoles(userName, request);
}
/*
This API is used to add users and groups with/without GRANT privileges to this Role. It follows add-or-update semantics
*/
/**
 * Adds users and groups to a role, with or without GRANT (admin) privilege.
 * Follows add-or-update semantics: existing members have their admin flag updated.
 *
 * @param roleId  id of the role to modify
 * @param users   user names to add
 * @param groups  group names to add
 * @param isAdmin whether the added members receive GRANT privilege on the role
 * @param request the HTTP request
 * @return the updated role
 */
@PUT
@Path("/api/roles/{id}/addUsersAndGroups")
@Consumes("application/json")
@Produces("application/json")
public RangerRole addUsersAndGroups(@PathParam("id") Long roleId, List<String> users, List<String> groups, Boolean isAdmin, @Context HttpServletRequest request) {
    return roleREST.addUsersAndGroups(roleId, users, groups, isAdmin);
}
/*
This API is used to remove users and groups, without regard to their GRANT privilege, from this Role.
*/
/**
 * Removes users and groups from a role, regardless of whether they held GRANT privilege.
 *
 * @param roleId  id of the role to modify
 * @param users   user names to remove
 * @param groups  group names to remove
 * @param request the HTTP request
 * @return the updated role
 */
@PUT
@Path("/api/roles/{id}/removeUsersAndGroups")
@Consumes("application/json")
@Produces("application/json")
public RangerRole removeUsersAndGroups(@PathParam("id") Long roleId, List<String> users, List<String> groups, @Context HttpServletRequest request) {
    return roleREST.removeUsersAndGroups(roleId, users, groups);
}
/*
This API is used to remove GRANT privilege from listed users and groups.
*/
/**
 * Revokes GRANT (admin) privilege on a role from the listed users and groups;
 * the members themselves remain in the role.
 *
 * @param roleId  id of the role to modify
 * @param users   user names whose admin flag is cleared
 * @param groups  group names whose admin flag is cleared
 * @param request the HTTP request
 * @return the updated role
 */
@PUT
@Path("/api/roles/{id}/removeAdminFromUsersAndGroups")
@Consumes("application/json")
@Produces("application/json")
public RangerRole removeAdminFromUsersAndGroups(@PathParam("id") Long roleId, List<String> users, List<String> groups, @Context HttpServletRequest request) {
    return roleREST.removeAdminFromUsersAndGroups(roleId, users, groups);
}
/*
This API is used to add users and roles with/without GRANT privileges to this Role. It follows add-or-update semantics
*/
/**
 * Grants role membership (users/groups/roles, with or without GRANT privilege) as
 * described by the request body. Follows add-or-update semantics.
 *
 * @param serviceName      service context used for authorization of the grant
 * @param grantRoleRequest grantees and options for the grant
 * @param request          the HTTP request
 * @return REST response describing the outcome
 */
@PUT
@Path("/api/roles/grant/{serviceName}")
@Consumes("application/json")
@Produces("application/json")
public RESTResponse grantRole(@PathParam("serviceName") String serviceName, GrantRevokeRoleRequest grantRoleRequest, @Context HttpServletRequest request) {
    // trace log previously named a non-existent method ("grantRoleUsersAndRoles") and had no exit line;
    // corrected for consistency with the other endpoints in this class
    logger.debug("==> PublicAPIsv2.grantRole({})", grantRoleRequest);
    RESTResponse ret = roleREST.grantRole(serviceName, grantRoleRequest, request);
    logger.debug("<== PublicAPIsv2.grantRole({})", grantRoleRequest);
    return ret;
}
/*
This API is used to remove users and groups, without regard to their GRANT privilege, from this Role.
*/
/**
 * Revokes role membership from the users/groups/roles listed in the request body,
 * regardless of their GRANT privilege.
 *
 * @param serviceName       service context used for authorization of the revoke
 * @param revokeRoleRequest members and options for the revoke
 * @param request           the HTTP request
 * @return REST response describing the outcome
 */
@PUT
@Path("/api/roles/revoke/{serviceName}")
@Consumes("application/json")
@Produces("application/json")
public RESTResponse revokeRoleUsersAndRoles(@PathParam("serviceName") String serviceName, GrantRevokeRoleRequest revokeRoleRequest, @Context HttpServletRequest request) {
    return roleREST.revokeRole(serviceName, revokeRoleRequest, request);
}
/**
 * Purges aged records of the given type (e.g. audit/login records — exact types are
 * defined by serviceREST; TODO confirm the accepted values of {@code type}).
 * Restricted to ROLE_SYS_ADMIN.
 *
 * @param recordType type of records to purge
 * @param olderThan  retention in days; records older than this are purged (default 180)
 * @param request    the HTTP request
 * @return per-record-type purge results
 */
@DELETE
@Path("/api/server/purge/records")
@PreAuthorize("hasRole('ROLE_SYS_ADMIN')")
public List<RangerPurgeResult> purgeRecords(@QueryParam("type") String recordType, @DefaultValue("180") @QueryParam("retentionDays") Integer olderThan, @Context HttpServletRequest request) {
    logger.debug("==> PublicAPIsv2.purgeRecords({}, {})", recordType, olderThan);
    List<RangerPurgeResult> ret = serviceREST.purgeRecords(recordType, olderThan, request);
    logger.debug("<== PublicAPIsv2.purgeRecords({}, {}): ret={}", recordType, olderThan, ret);
    return ret;
}
}
|
apache/druid | 37,440 | processing/src/test/java/org/apache/druid/segment/filter/ArrayContainsElementFilterTests.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.filter;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.error.DruidException;
import org.apache.druid.guice.BuiltInTypesModule;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.query.filter.ArrayContainsElementFilter;
import org.apache.druid.query.filter.Filter;
import org.apache.druid.query.filter.FilterTuning;
import org.apache.druid.query.filter.NotDimFilter;
import org.apache.druid.segment.CursorFactory;
import org.apache.druid.segment.IndexBuilder;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.Closeable;
import java.util.Arrays;
public class ArrayContainsElementFilterTests
{
@RunWith(Parameterized.class)
public static class ArrayContainsElementFilterTest extends BaseFilterTest
{
/**
 * Wires one parameterized segment/cursor combination into the shared BaseFilterTest
 * harness, using the common DEFAULT_ROWS fixture.
 *
 * @param testName     human-readable name of the parameter combination (also used by Assume checks)
 * @param indexBuilder builder for the underlying segment
 * @param finisher     converts the built index into a cursor factory plus its closer
 * @param cnf          whether filters are converted to CNF before running
 * @param optimize     whether filter optimization is applied before running
 */
public ArrayContainsElementFilterTest(
    String testName,
    IndexBuilder indexBuilder,
    Function<IndexBuilder, Pair<CursorFactory, Closeable>> finisher,
    boolean cnf,
    boolean optimize
)
{
  super(testName, DEFAULT_ROWS, indexBuilder, finisher, cnf, optimize);
}
/**
 * Releases the segments/closeables cached by BaseFilterTest for this test class
 * once all parameterized runs have finished.
 */
@AfterClass
public static void tearDown() throws Exception
{
  BaseFilterTest.tearDown(ArrayContainsElementFilterTest.class.getName());
}
@Test
public void testArrayStringColumn()
{
  // array columns are unsupported by these segment representations
  Assume.assumeFalse(testName.contains("frame (columnar)") || testName.contains("rowBasedWithoutTypeSignature"));
  // rows (dim0 -> arrayString): 0=["a","b","c"], 1=[], 2=null, 3=["a","b","c"], 4=["c","d"], 5=[null]
  final ArrayContainsElementFilter containsA = new ArrayContainsElementFilter("arrayString", ColumnType.STRING, "a", null);
  final ArrayContainsElementFilter containsC = new ArrayContainsElementFilter("arrayString", ColumnType.STRING, "c", null);
  final ArrayContainsElementFilter containsNull = new ArrayContainsElementFilter("arrayString", ColumnType.STRING, null, null);

  assertFilterMatches(containsA, ImmutableList.of("0", "3"));
  assertFilterMatches(NotDimFilter.of(containsA), ImmutableList.of("1", "4", "5"));
  assertFilterMatches(containsC, ImmutableList.of("0", "3", "4"));
  assertFilterMatches(NotDimFilter.of(containsC), ImmutableList.of("1", "5"));
  assertFilterMatches(containsNull, ImmutableList.of("5"));
  assertFilterMatches(NotDimFilter.of(containsNull), ImmutableList.of("0", "1", "3", "4"));
}
@Test
public void testArrayLongColumn()
{
  // array columns are unsupported by these segment representations
  Assume.assumeFalse(testName.contains("frame (columnar)") || testName.contains("rowBasedWithoutTypeSignature"));
  // rows (dim0 -> arrayLong): 0=[1,2,3], 1=[], 2=[1,2,3], 3=null, 4=[null], 5=[123,345]
  final ArrayContainsElementFilter containsTwo = new ArrayContainsElementFilter("arrayLong", ColumnType.LONG, 2L, null);
  final ArrayContainsElementFilter containsNull = new ArrayContainsElementFilter("arrayLong", ColumnType.LONG, null, null);

  assertFilterMatches(containsTwo, ImmutableList.of("0", "2"));
  assertFilterMatches(NotDimFilter.of(containsTwo), ImmutableList.of("1", "4", "5"));
  assertFilterMatches(containsNull, ImmutableList.of("4"));
  assertFilterMatches(NotDimFilter.of(containsNull), ImmutableList.of("0", "1", "2", "5"));
  // match values of other numeric/string types are coerced against the long elements
  assertFilterMatches(new ArrayContainsElementFilter("arrayLong", ColumnType.DOUBLE, 2.0, null), ImmutableList.of("0", "2"));
  assertFilterMatches(new ArrayContainsElementFilter("arrayLong", ColumnType.STRING, "2", null), ImmutableList.of("0", "2"));
}
@Test
public void testArrayDoubleColumn()
{
  // array columns are unsupported by these segment representations
  Assume.assumeFalse(testName.contains("frame (columnar)") || testName.contains("rowBasedWithoutTypeSignature"));
  // rows (dim0 -> arrayDouble): 0=[1.1,2.2,3.3], 1=[1.1,2.2,3.3], 2=[null], 3=[], 4=[-1.1,-333.3], 5=null
  final ArrayContainsElementFilter containsTwoTwo = new ArrayContainsElementFilter("arrayDouble", ColumnType.DOUBLE, 2.2, null);

  assertFilterMatches(containsTwoTwo, ImmutableList.of("0", "1"));
  assertFilterMatches(NotDimFilter.of(containsTwoTwo), ImmutableList.of("2", "3", "4"));
  // string match value is coerced against the double elements
  assertFilterMatches(new ArrayContainsElementFilter("arrayDouble", ColumnType.STRING, "2.2", null), ImmutableList.of("0", "1"));
  assertFilterMatches(new ArrayContainsElementFilter("arrayDouble", ColumnType.DOUBLE, null, null), ImmutableList.of("2"));
}
@Test
public void testArrayStringColumnContainsArrays()
{
  Assume.assumeFalse(testName.contains("frame (columnar)") || testName.contains("rowBasedWithoutTypeSignature"));
  // the column holds flat arrays, not nested arrays, so an array-typed element never matches
  final ArrayContainsElementFilter containsArray =
      new ArrayContainsElementFilter("arrayString", ColumnType.STRING_ARRAY, ImmutableList.of("a", "b", "c"), null);
  assertFilterMatches(containsArray, ImmutableList.of());
  assertFilterMatches(NotDimFilter.of(containsArray), ImmutableList.of("0", "1", "3", "4", "5"));
}
@Test
public void testArrayLongColumnContainsArrays()
{
  Assume.assumeFalse(testName.contains("frame (columnar)") || testName.contains("rowBasedWithoutTypeSignature"));
  // the column holds flat arrays, not nested arrays, so an array-typed element never matches
  final ArrayContainsElementFilter containsArray =
      new ArrayContainsElementFilter("arrayLong", ColumnType.LONG_ARRAY, ImmutableList.of(1L, 2L, 3L), null);
  assertFilterMatches(containsArray, ImmutableList.of());
  assertFilterMatches(NotDimFilter.of(containsArray), ImmutableList.of("0", "1", "2", "4", "5"));
}
@Test
public void testArrayDoubleColumnContainsArrays()
{
  Assume.assumeFalse(testName.contains("frame (columnar)") || testName.contains("rowBasedWithoutTypeSignature"));
  // the column holds flat arrays, not nested arrays, so an array-typed element never matches
  final ArrayContainsElementFilter containsArray =
      new ArrayContainsElementFilter("arrayDouble", ColumnType.DOUBLE_ARRAY, ImmutableList.of(1.1, 2.2, 3.3), null);
  assertFilterMatches(containsArray, ImmutableList.of());
  assertFilterMatches(NotDimFilter.of(containsArray), ImmutableList.of("0", "1", "2", "3", "4"));
}
@Test
public void testScalarColumnContains()
{
  // scalar columns behave like single-element arrays for this filter
  // string scalar
  assertFilterMatches(new ArrayContainsElementFilter("s0", ColumnType.STRING, "a", null), ImmutableList.of("1", "5"));
  assertFilterMatches(new ArrayContainsElementFilter("s0", ColumnType.STRING, "b", null), ImmutableList.of("2"));
  assertFilterMatches(new ArrayContainsElementFilter("s0", ColumnType.STRING, "c", null), ImmutableList.of("4"));
  assertFilterMatches(new ArrayContainsElementFilter("s0", ColumnType.STRING, "noexist", null), ImmutableList.of());
  // array-typed element matches only a single-element array equal to the scalar
  assertFilterMatches(new ArrayContainsElementFilter("s0", ColumnType.STRING_ARRAY, ImmutableList.of("c"), null), ImmutableList.of("4"));
  assertFilterMatches(new ArrayContainsElementFilter("s0", ColumnType.STRING_ARRAY, ImmutableList.of("a", "c"), null), ImmutableList.of());
  // double scalar
  assertFilterMatches(new ArrayContainsElementFilter("d0", ColumnType.DOUBLE, 10.1, null), ImmutableList.of("1"));
  assertFilterMatches(new ArrayContainsElementFilter("d0", ColumnType.DOUBLE, 120.0245, null), ImmutableList.of("3"));
  assertFilterMatches(new ArrayContainsElementFilter("d0", ColumnType.DOUBLE, 765.432, null), ImmutableList.of("5"));
  assertFilterMatches(new ArrayContainsElementFilter("d0", ColumnType.DOUBLE, 765.431, null), ImmutableList.of());
  assertFilterMatches(new ArrayContainsElementFilter("d0", ColumnType.DOUBLE_ARRAY, new Object[]{10.1}, null), ImmutableList.of("1"));
  assertFilterMatches(new ArrayContainsElementFilter("d0", ColumnType.DOUBLE_ARRAY, new Object[]{10.1, 120.0245}, null), ImmutableList.of());
  // long scalar
  assertFilterMatches(new ArrayContainsElementFilter("l0", ColumnType.LONG, 100L, null), ImmutableList.of("1"));
  assertFilterMatches(new ArrayContainsElementFilter("l0", ColumnType.LONG, 40L, null), ImmutableList.of("2"));
  assertFilterMatches(new ArrayContainsElementFilter("l0", ColumnType.LONG, 9001L, null), ImmutableList.of("4"));
  assertFilterMatches(new ArrayContainsElementFilter("l0", ColumnType.LONG, 9000L, null), ImmutableList.of());
  assertFilterMatches(new ArrayContainsElementFilter("l0", ColumnType.LONG_ARRAY, ImmutableList.of(9001L), null), ImmutableList.of("4"));
  assertFilterMatches(new ArrayContainsElementFilter("l0", ColumnType.LONG_ARRAY, ImmutableList.of(40L, 9001L), null), ImmutableList.of());
}
@Test
public void testArrayContainsNestedArray()
{
  // only the auto schema supports array-of-array columns; skip other segment types
  Assume.assumeFalse(testName.contains("frame (columnar)") || testName.contains("rowBasedWithoutTypeSignature"));
  // vectorized engines cannot handle nested array columns yet, hence the skip-vectorize variant
  assertFilterMatchesSkipVectorize(
      new ArrayContainsElementFilter("nestedArrayLong", ColumnType.LONG_ARRAY, new Object[]{1L, 2L, 3L}, null),
      ImmutableList.of("0", "2")
  );
  assertFilterMatchesSkipVectorize(
      new ArrayContainsElementFilter("nestedArrayLong", ColumnType.LONG_ARRAY, new Object[]{1L, 2L}, null),
      ImmutableList.of()
  );
}
@Test
public void testArrayContainsMvd()
{
  final ArrayContainsElementFilter containsA = new ArrayContainsElementFilter("dim2", ColumnType.STRING, "a", null);
  assertFilterMatches(containsA, ImmutableList.of("0", "3"));
  // multi-value dimension treats [] as null, so in sql compatible mode row 1 ends up as not matching;
  // the auto schema keeps [] distinct from null and thus includes row 1
  final ImmutableList<String> expectedNotContainsA =
      isAutoSchema() ? ImmutableList.of("1", "2", "4") : ImmutableList.of("2", "4");
  assertFilterMatches(NotDimFilter.of(containsA), expectedNotContainsA);
  // [""] becomes [null] in default value mode; no rows match a null element either way
  assertFilterMatches(new ArrayContainsElementFilter("dim2", ColumnType.STRING, null, null), ImmutableList.of());
}
@Test
public void testNestedArrayStringColumn()
{
  // mirror of testArrayStringColumn targeting the nested-column path "nested.arrayString"
  Assume.assumeFalse(testName.contains("frame (columnar)") || testName.contains("rowBasedWithoutTypeSignature"));
  // rows (dim0 -> arrayString): 0=["a","b","c"], 1=[], 2=null, 3=["a","b","c"], 4=["c","d"], 5=[null]
  final ArrayContainsElementFilter containsA = new ArrayContainsElementFilter("nested.arrayString", ColumnType.STRING, "a", null);
  final ArrayContainsElementFilter containsC = new ArrayContainsElementFilter("nested.arrayString", ColumnType.STRING, "c", null);
  final ArrayContainsElementFilter containsNull = new ArrayContainsElementFilter("nested.arrayString", ColumnType.STRING, null, null);

  assertFilterMatches(containsA, ImmutableList.of("0", "3"));
  assertFilterMatches(NotDimFilter.of(containsA), ImmutableList.of("1", "4", "5"));
  assertFilterMatches(containsC, ImmutableList.of("0", "3", "4"));
  assertFilterMatches(NotDimFilter.of(containsC), ImmutableList.of("1", "5"));
  assertFilterMatches(containsNull, ImmutableList.of("5"));
  assertFilterMatches(NotDimFilter.of(containsNull), ImmutableList.of("0", "1", "3", "4"));
}
@Test
public void testNestedArrayLongColumn()
{
  // mirror of testArrayLongColumn targeting the nested-column path "nested.arrayLong"
  Assume.assumeFalse(testName.contains("frame (columnar)") || testName.contains("rowBasedWithoutTypeSignature"));
  // rows (dim0 -> arrayLong): 0=[1,2,3], 1=[], 2=[1,2,3], 3=null, 4=[null], 5=[123,345]
  final ArrayContainsElementFilter containsTwo = new ArrayContainsElementFilter("nested.arrayLong", ColumnType.LONG, 2L, null);
  final ArrayContainsElementFilter containsNull = new ArrayContainsElementFilter("nested.arrayLong", ColumnType.LONG, null, null);

  assertFilterMatches(containsTwo, ImmutableList.of("0", "2"));
  assertFilterMatches(NotDimFilter.of(containsTwo), ImmutableList.of("1", "4", "5"));
  assertFilterMatches(containsNull, ImmutableList.of("4"));
  assertFilterMatches(NotDimFilter.of(containsNull), ImmutableList.of("0", "1", "2", "5"));
  // match values of other numeric/string types are coerced against the long elements
  assertFilterMatches(new ArrayContainsElementFilter("nested.arrayLong", ColumnType.DOUBLE, 2.0, null), ImmutableList.of("0", "2"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.arrayLong", ColumnType.STRING, "2", null), ImmutableList.of("0", "2"));
}
@Test
public void testNestedArrayDoubleColumn()
{
  // mirror of testArrayDoubleColumn targeting the nested-column path "nested.arrayDouble"
  Assume.assumeTrue(canTestArrayColumns());
  // rows (dim0 -> arrayDouble): 0=[1.1,2.2,3.3], 1=[1.1,2.2,3.3], 2=[null], 3=[], 4=[-1.1,-333.3], 5=null
  final ArrayContainsElementFilter containsTwoTwo = new ArrayContainsElementFilter("nested.arrayDouble", ColumnType.DOUBLE, 2.2, null);

  assertFilterMatches(containsTwoTwo, ImmutableList.of("0", "1"));
  assertFilterMatches(NotDimFilter.of(containsTwoTwo), ImmutableList.of("2", "3", "4"));
  // string match value is coerced against the double elements
  assertFilterMatches(new ArrayContainsElementFilter("nested.arrayDouble", ColumnType.STRING, "2.2", null), ImmutableList.of("0", "1"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.arrayDouble", ColumnType.DOUBLE, null, null), ImmutableList.of("2"));
}
@Test
public void testNestedArrayStringColumnContainsArrays()
{
  // mirror of testArrayStringColumnContainsArrays targeting "nested.arrayString"
  Assume.assumeTrue(canTestArrayColumns());
  // the column holds flat arrays, not nested arrays, so an array-typed element never matches
  final ArrayContainsElementFilter containsArray =
      new ArrayContainsElementFilter("nested.arrayString", ColumnType.STRING_ARRAY, ImmutableList.of("a", "b", "c"), null);
  assertFilterMatches(containsArray, ImmutableList.of());
  assertFilterMatches(NotDimFilter.of(containsArray), ImmutableList.of("0", "1", "3", "4", "5"));
}
@Test
public void testNestedArrayLongColumnContainsArrays()
{
  // mirror of testArrayLongColumnContainsArrays targeting "nested.arrayLong"
  Assume.assumeTrue(canTestArrayColumns());
  // the column holds flat arrays, not nested arrays, so an array-typed element never matches
  final ArrayContainsElementFilter containsArray =
      new ArrayContainsElementFilter("nested.arrayLong", ColumnType.LONG_ARRAY, ImmutableList.of(1L, 2L, 3L), null);
  assertFilterMatches(containsArray, ImmutableList.of());
  assertFilterMatches(NotDimFilter.of(containsArray), ImmutableList.of("0", "1", "2", "4", "5"));
}
@Test
public void testNestedArrayDoubleColumnContainsArrays()
{
  // mirror of testArrayDoubleColumnContainsArrays targeting "nested.arrayDouble"
  Assume.assumeTrue(canTestArrayColumns());
  // the column holds flat arrays, not nested arrays, so an array-typed element never matches
  final ArrayContainsElementFilter containsArray =
      new ArrayContainsElementFilter("nested.arrayDouble", ColumnType.DOUBLE_ARRAY, ImmutableList.of(1.1, 2.2, 3.3), null);
  assertFilterMatches(containsArray, ImmutableList.of());
  assertFilterMatches(NotDimFilter.of(containsArray), ImmutableList.of("0", "1", "2", "3", "4"));
}
@Test
public void testNestedScalarColumnContains()
{
  // mirror of testScalarColumnContains targeting nested columns
  Assume.assumeTrue(canTestArrayColumns());
  // string scalar
  assertFilterMatches(new ArrayContainsElementFilter("nested.s0", ColumnType.STRING, "a", null), ImmutableList.of("1", "5"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.s0", ColumnType.STRING, "b", null), ImmutableList.of("2"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.s0", ColumnType.STRING, "c", null), ImmutableList.of("4"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.s0", ColumnType.STRING, "noexist", null), ImmutableList.of());
  assertFilterMatches(new ArrayContainsElementFilter("nested.s0", ColumnType.STRING_ARRAY, ImmutableList.of("c"), null), ImmutableList.of("4"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.s0", ColumnType.STRING_ARRAY, ImmutableList.of("a", "c"), null), ImmutableList.of());
  // double scalar
  assertFilterMatches(new ArrayContainsElementFilter("nested.d0", ColumnType.DOUBLE, 10.1, null), ImmutableList.of("1"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.d0", ColumnType.DOUBLE, 120.0245, null), ImmutableList.of("3"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.d0", ColumnType.DOUBLE, 765.432, null), ImmutableList.of("5"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.d0", ColumnType.DOUBLE, 765.431, null), ImmutableList.of());
  assertFilterMatches(new ArrayContainsElementFilter("nested.d0", ColumnType.DOUBLE_ARRAY, new Object[]{10.1}, null), ImmutableList.of("1"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.d0", ColumnType.DOUBLE_ARRAY, new Object[]{10.1, 120.0245}, null), ImmutableList.of());
  // long scalar
  assertFilterMatches(new ArrayContainsElementFilter("nested.l0", ColumnType.LONG, 100L, null), ImmutableList.of("1"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.l0", ColumnType.LONG, 40L, null), ImmutableList.of("2"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.l0", ColumnType.LONG, 9001L, null), ImmutableList.of("4"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.l0", ColumnType.LONG, 9000L, null), ImmutableList.of());
  assertFilterMatches(new ArrayContainsElementFilter("nested.l0", ColumnType.LONG_ARRAY, ImmutableList.of(9001L), null), ImmutableList.of("4"));
  assertFilterMatches(new ArrayContainsElementFilter("nested.l0", ColumnType.LONG_ARRAY, ImmutableList.of(40L, 9001L), null), ImmutableList.of());
}
}
public static class ArrayContainsElementFilterNonParameterizedTests extends InitializedNullHandlingTest
{
/**
 * Verifies JSON round-trip (serialize then deserialize) equality for every supported
 * element-match-value type, including array values supplied both as {@code Object[]}
 * and as {@code List}, and a nested-data value.
 */
@Test
public void testSerde() throws JsonProcessingException
{
  ObjectMapper mapper = new DefaultObjectMapper();
  assertRoundTrip(mapper, new ArrayContainsElementFilter("x", ColumnType.STRING, "hello", null));
  assertRoundTrip(mapper, new ArrayContainsElementFilter("x", ColumnType.LONG, 1L, null));
  // int literal for a LONG-typed match value must also survive the round trip
  assertRoundTrip(mapper, new ArrayContainsElementFilter("x", ColumnType.LONG, 1, null));
  assertRoundTrip(mapper, new ArrayContainsElementFilter("x", ColumnType.DOUBLE, 111.111, null));
  assertRoundTrip(mapper, new ArrayContainsElementFilter("x", ColumnType.FLOAT, 1234.0f, null));
  assertRoundTrip(mapper, new ArrayContainsElementFilter("x", ColumnType.STRING_ARRAY, new Object[]{"a", "b", null, "c"}, null));
  assertRoundTrip(mapper, new ArrayContainsElementFilter("x", ColumnType.STRING_ARRAY, Arrays.asList("a", "b", null, "c"), null));
  assertRoundTrip(mapper, new ArrayContainsElementFilter("x", ColumnType.LONG_ARRAY, new Object[]{1L, null, 2L, 3L}, null));
  assertRoundTrip(mapper, new ArrayContainsElementFilter("x", ColumnType.LONG_ARRAY, Arrays.asList(1L, null, 2L, 3L), null));
  assertRoundTrip(mapper, new ArrayContainsElementFilter("x", ColumnType.DOUBLE_ARRAY, new Object[]{1.1, 2.1, null, 3.1}, null));
  assertRoundTrip(mapper, new ArrayContainsElementFilter("x", ColumnType.DOUBLE_ARRAY, Arrays.asList(1.1, 2.1, null, 3.1), null));
  assertRoundTrip(
      mapper,
      new ArrayContainsElementFilter("x", ColumnType.NESTED_DATA, ImmutableMap.of("x", ImmutableList.of(1, 2, 3)), null)
  );
}

/** Serializes the filter to JSON and asserts the deserialized result equals the original. */
private static void assertRoundTrip(ObjectMapper mapper, ArrayContainsElementFilter filter) throws JsonProcessingException
{
  String json = mapper.writeValueAsString(filter);
  Assert.assertEquals(filter, mapper.readValue(json, ArrayContainsElementFilter.class));
}
/** Rewriting the required column "x" to "y" must produce an equivalent filter on "y". */
@Test
public void testRewrite()
{
  final ArrayContainsElementFilter original = new ArrayContainsElementFilter("x", ColumnType.STRING, "hello", null);
  final Filter rewritten = original.rewriteRequiredColumns(ImmutableMap.of("x", "y"));
  Assert.assertEquals(new ArrayContainsElementFilter("y", ColumnType.STRING, "hello", null), rewritten);
}
/**
 * Verifies cache-key semantics for every match-value type: equal filters share a key,
 * a different match value changes the key, and {@link FilterTuning} does not affect it.
 */
@Test
public void testGetCacheKey()
{
  assertCacheKeyBehavior(
      new ArrayContainsElementFilter("x", ColumnType.STRING, "hello", null),
      new ArrayContainsElementFilter("x", ColumnType.STRING, "hello", null),
      new ArrayContainsElementFilter("x", ColumnType.STRING, "world", null),
      new ArrayContainsElementFilter("x", ColumnType.STRING, "hello", new FilterTuning(true, null, null))
  );
  // int vs long literal for a LONG-typed match value must yield the same key
  assertCacheKeyBehavior(
      new ArrayContainsElementFilter("x", ColumnType.LONG, 1L, null),
      new ArrayContainsElementFilter("x", ColumnType.LONG, 1, null),
      new ArrayContainsElementFilter("x", ColumnType.LONG, 2L, null),
      new ArrayContainsElementFilter("x", ColumnType.LONG, 1L, new FilterTuning(true, null, null))
  );
  assertCacheKeyBehavior(
      new ArrayContainsElementFilter("x", ColumnType.DOUBLE, 1.1, null),
      new ArrayContainsElementFilter("x", ColumnType.DOUBLE, 1.1, null),
      new ArrayContainsElementFilter("x", ColumnType.DOUBLE, 2.2, null),
      new ArrayContainsElementFilter("x", ColumnType.DOUBLE, 1.1, new FilterTuning(true, null, null))
  );
  assertCacheKeyBehavior(
      new ArrayContainsElementFilter("x", ColumnType.FLOAT, 1.1f, null),
      new ArrayContainsElementFilter("x", ColumnType.FLOAT, 1.1f, null),
      new ArrayContainsElementFilter("x", ColumnType.FLOAT, 2.2f, null),
      new ArrayContainsElementFilter("x", ColumnType.FLOAT, 1.1f, new FilterTuning(true, null, null))
  );
  // Object[] vs List for array-typed match values must yield the same key
  assertCacheKeyBehavior(
      new ArrayContainsElementFilter("x", ColumnType.STRING_ARRAY, new Object[]{"a", "b", null, "c"}, null),
      new ArrayContainsElementFilter("x", ColumnType.STRING_ARRAY, Arrays.asList("a", "b", null, "c"), null),
      new ArrayContainsElementFilter("x", ColumnType.STRING_ARRAY, new Object[]{"a", "b", "c"}, null),
      new ArrayContainsElementFilter("x", ColumnType.STRING_ARRAY, new Object[]{"a", "b", null, "c"}, new FilterTuning(true, null, null))
  );
  assertCacheKeyBehavior(
      new ArrayContainsElementFilter("x", ColumnType.LONG_ARRAY, new Object[]{100L, 200L, null, 300L}, null),
      new ArrayContainsElementFilter("x", ColumnType.LONG_ARRAY, Arrays.asList(100L, 200L, null, 300L), null),
      new ArrayContainsElementFilter("x", ColumnType.LONG_ARRAY, new Object[]{100L, null, 200L, 300L}, null),
      new ArrayContainsElementFilter("x", ColumnType.LONG_ARRAY, new Object[]{100L, 200L, null, 300L}, new FilterTuning(true, null, null))
  );
  assertCacheKeyBehavior(
      new ArrayContainsElementFilter("x", ColumnType.DOUBLE_ARRAY, new Object[]{1.001, null, 20.0002, 300.0003}, null),
      new ArrayContainsElementFilter("x", ColumnType.DOUBLE_ARRAY, Arrays.asList(1.001, null, 20.0002, 300.0003), null),
      new ArrayContainsElementFilter("x", ColumnType.DOUBLE_ARRAY, new Object[]{1.001, 20.0002, 300.0003, null}, null),
      new ArrayContainsElementFilter("x", ColumnType.DOUBLE_ARRAY, new Object[]{1.001, null, 20.0002, 300.0003}, new FilterTuning(true, null, null))
  );
  // nested-data values need the nested-column serde handlers registered first
  BuiltInTypesModule.registerHandlersAndSerde();
  assertCacheKeyBehavior(
      new ArrayContainsElementFilter("x", ColumnType.NESTED_DATA, ImmutableMap.of("x", ImmutableList.of(1, 2, 3)), null),
      new ArrayContainsElementFilter("x", ColumnType.NESTED_DATA, ImmutableMap.of("x", ImmutableList.of(1, 2, 3)), null),
      new ArrayContainsElementFilter("x", ColumnType.NESTED_DATA, ImmutableMap.of("x", ImmutableList.of(1, 2, 3, 4)), null),
      new ArrayContainsElementFilter("x", ColumnType.NESTED_DATA, ImmutableMap.of("x", ImmutableList.of(1, 2, 3)), new FilterTuning(true, null, null))
  );
}

/**
 * Asserts that {@code same} shares the cache key of {@code filter}, {@code different}
 * does not, and {@code tuned} (identical except for FilterTuning) shares it.
 */
private static void assertCacheKeyBehavior(
    ArrayContainsElementFilter filter,
    ArrayContainsElementFilter same,
    ArrayContainsElementFilter different,
    ArrayContainsElementFilter tuned
)
{
  Assert.assertArrayEquals(filter.getCacheKey(), same.getCacheKey());
  Assert.assertFalse(Arrays.equals(filter.getCacheKey(), different.getCacheKey()));
  Assert.assertArrayEquals(filter.getCacheKey(), tuned.getCacheKey());
}
/** Null column or null elementMatchValueType must be rejected with a DruidException. */
@Test
public void testInvalidParameters()
{
  final Throwable nullColumn = Assert.assertThrows(
      DruidException.class,
      () -> new ArrayContainsElementFilter(null, ColumnType.STRING, null, null)
  );
  Assert.assertEquals("Invalid array_contains filter, column cannot be null", nullColumn.getMessage());

  final Throwable nullType = Assert.assertThrows(
      DruidException.class,
      () -> new ArrayContainsElementFilter("dim0", null, null, null)
  );
  Assert.assertEquals(
      "Invalid array_contains filter on column [dim0], elementMatchValueType cannot be null",
      nullType.getMessage()
  );
}
@Test
public void test_equals()
{
EqualsVerifier.forClass(ArrayContainsElementFilter.class).usingGetClass()
.withNonnullFields(
"column",
"elementMatchValueType",
"elementMatchValueEval",
"elementMatchValue",
"predicateFactory",
"optimizedFilterIncludeUnknown",
"optimizedFilterNoIncludeUnknown"
)
.withPrefabValues(ColumnType.class, ColumnType.STRING, ColumnType.DOUBLE)
.withIgnoredFields(
"predicateFactory",
"optimizedFilterIncludeUnknown",
"optimizedFilterNoIncludeUnknown",
"elementMatchValue"
)
.verify();
}
}
}
|
apache/ranger | 37,869 | agents-common/src/main/java/org/apache/ranger/plugin/model/RangerGds.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ranger.plugin.model;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import org.apache.ranger.plugin.model.RangerPolicy.RangerPolicyItemDataMaskInfo;
import org.apache.ranger.plugin.model.RangerPolicy.RangerPolicyItemRowFilterInfo;
import org.apache.ranger.plugin.model.RangerPolicy.RangerPolicyResource;
import org.apache.ranger.plugin.model.RangerPrincipal.PrincipalType;
import org.apache.ranger.plugin.store.PList;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class RangerGds {
public enum GdsPermission { NONE, LIST, VIEW, AUDIT, POLICY_ADMIN, ADMIN }
public enum GdsShareStatus { NONE, REQUESTED, GRANTED, DENIED, ACTIVE }
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public static class RangerGdsBaseModelObject extends RangerBaseModelObject implements java.io.Serializable {
private static final long serialVersionUID = 1L;
private String description;
private Map<String, String> options;
private Map<String, String> additionalInfo;
public RangerGdsBaseModelObject() {
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Map<String, String> getOptions() {
return options;
}
public void setOptions(Map<String, String> options) {
this.options = options;
}
public Map<String, String> getAdditionalInfo() {
return additionalInfo;
}
public void setAdditionalInfo(Map<String, String> additionalInfo) {
this.additionalInfo = additionalInfo;
}
@Override
public StringBuilder toString(StringBuilder sb) {
super.toString(sb);
sb.append("description={").append(description).append("} ")
.append("options={").append(options).append("} ")
.append("additionalInfo={").append(additionalInfo).append("} ");
return sb;
}
}
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public static class RangerDataset extends RangerGdsBaseModelObject implements java.io.Serializable {
private static final long serialVersionUID = 1L;
private String name;
private RangerGdsObjectACL acl;
private RangerValiditySchedule validitySchedule;
private String termsOfUse;
private List<String> labels;
private List<String> keywords;
public RangerDataset() {
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public RangerGdsObjectACL getAcl() {
return acl;
}
public void setAcl(RangerGdsObjectACL acl) {
this.acl = acl;
}
public RangerValiditySchedule getValiditySchedule() {
return validitySchedule;
}
public void setValiditySchedule(RangerValiditySchedule validitySchedule) {
this.validitySchedule = validitySchedule;
}
public String getTermsOfUse() {
return termsOfUse;
}
public void setTermsOfUse(String termsOfUse) {
this.termsOfUse = termsOfUse;
}
public List<String> getLabels() {
return labels;
}
public void setLabels(List<String> labels) {
this.labels = labels;
}
public List<String> getKeywords() {
return keywords;
}
public void setKeywords(List<String> keywords) {
this.keywords = keywords;
}
@Override
public StringBuilder toString(StringBuilder sb) {
sb.append("RangerDataset={");
super.toString(sb);
sb.append("name={").append(name).append("} ")
.append("acl={").append(acl).append("} ")
.append("validitySchedule={").append(validitySchedule).append("} ")
.append("termsOfUse={").append(termsOfUse).append("} ")
.append("labels={").append(labels).append("} ")
.append("keywords={").append(keywords).append("} ")
.append("}");
return sb;
}
}
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public static class RangerProject extends RangerGdsBaseModelObject implements java.io.Serializable {
private static final long serialVersionUID = 1L;
private String name;
private RangerGdsObjectACL acl;
private RangerValiditySchedule validitySchedule;
private String termsOfUse;
public RangerProject() {
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public RangerGdsObjectACL getAcl() {
return acl;
}
public void setAcl(RangerGdsObjectACL acl) {
this.acl = acl;
}
public RangerValiditySchedule getValiditySchedule() {
return validitySchedule;
}
public void setValiditySchedule(RangerValiditySchedule validitySchedule) {
this.validitySchedule = validitySchedule;
}
public String getTermsOfUse() {
return termsOfUse;
}
public void setTermsOfUse(String termsOfUse) {
this.termsOfUse = termsOfUse;
}
@Override
public StringBuilder toString(StringBuilder sb) {
sb.append("RangerProject={");
super.toString(sb);
sb.append("name={").append(name).append("} ")
.append("acl={").append(acl).append("} ")
.append("validitySchedule={").append(validitySchedule).append("} ")
.append("termsOfUse={").append(termsOfUse).append("} ")
.append("}");
return sb;
}
}
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public static class RangerDataShare extends RangerGdsBaseModelObject implements java.io.Serializable {
private static final long serialVersionUID = 1L;
private String name;
private RangerGdsObjectACL acl;
private String service;
private String zone;
private String conditionExpr;
private Set<String> defaultAccessTypes;
private List<RangerGdsMaskInfo> defaultTagMasks;
private String termsOfUse;
public RangerDataShare() {
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public RangerGdsObjectACL getAcl() {
return acl;
}
public void setAcl(RangerGdsObjectACL acl) {
this.acl = acl;
}
public String getService() {
return service;
}
public void setService(String service) {
this.service = service;
}
public String getZone() {
return zone;
}
public void setZone(String zone) {
this.zone = zone;
}
public String getConditionExpr() {
return conditionExpr;
}
public void setConditionExpr(String conditionExpr) {
this.conditionExpr = conditionExpr;
}
public Set<String> getDefaultAccessTypes() {
return defaultAccessTypes;
}
public void setDefaultAccessTypes(Set<String> defaultAccessTypes) {
this.defaultAccessTypes = defaultAccessTypes;
}
public List<RangerGdsMaskInfo> getDefaultTagMasks() {
return defaultTagMasks;
}
public void setDefaultTagMasks(List<RangerGdsMaskInfo> defaultTagMasks) {
this.defaultTagMasks = defaultTagMasks;
}
public String getTermsOfUse() {
return termsOfUse;
}
public void setTermsOfUse(String termsOfUse) {
this.termsOfUse = termsOfUse;
}
@Override
public StringBuilder toString(StringBuilder sb) {
sb.append("RangerDataShare={");
super.toString(sb);
sb.append("name={").append(name).append("} ")
.append("acl={").append(acl).append("} ")
.append("service={").append(service).append("} ")
.append("zone={").append(zone).append("} ")
.append("conditionExpr={").append(conditionExpr).append("} ")
.append("defaultAccessTypes={").append(defaultAccessTypes).append("} ")
.append("defaultTagMasks={").append(defaultTagMasks).append("} ")
.append("termsOfUse={").append(termsOfUse).append("} ")
.append("}");
return sb;
}
}
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public static class RangerSharedResource extends RangerGdsBaseModelObject implements java.io.Serializable {
private static final long serialVersionUID = 1L;
private String name;
private Long dataShareId;
private Map<String, RangerPolicyResource> resource;
private RangerPolicyResource subResource;
private String subResourceType;
private String conditionExpr;
private Set<String> accessTypes;
private RangerPolicyItemRowFilterInfo rowFilter;
private List<RangerGdsMaskInfo> subResourceMasks;
private Set<String> profiles;
public RangerSharedResource() {
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Long getDataShareId() {
return dataShareId;
}
public void setDataShareId(Long dataShareId) {
this.dataShareId = dataShareId;
}
public Map<String, RangerPolicyResource> getResource() {
return resource;
}
public void setResource(Map<String, RangerPolicyResource> resource) {
this.resource = resource;
}
public RangerPolicyResource getSubResource() {
return subResource;
}
public void setSubResource(RangerPolicyResource subResource) {
this.subResource = subResource;
}
public String getSubResourceType() {
return subResourceType;
}
public void setSubResourceType(String subResourceType) {
this.subResourceType = subResourceType;
}
public String getConditionExpr() {
return conditionExpr;
}
public void setConditionExpr(String conditionExpr) {
this.conditionExpr = conditionExpr;
}
public Set<String> getAccessTypes() {
return accessTypes;
}
public void setAccessTypes(Set<String> accessTypes) {
this.accessTypes = accessTypes;
}
public RangerPolicyItemRowFilterInfo getRowFilter() {
return rowFilter;
}
public void setRowFilter(RangerPolicyItemRowFilterInfo rowFilter) {
this.rowFilter = rowFilter;
}
public List<RangerGdsMaskInfo> getSubResourceMasks() {
return subResourceMasks;
}
public void setSubResourceMasks(List<RangerGdsMaskInfo> subResourceMasks) {
this.subResourceMasks = subResourceMasks;
}
public Set<String> getProfiles() {
return profiles;
}
public void setProfiles(Set<String> profiles) {
this.profiles = profiles;
}
public StringBuilder toString(StringBuilder sb) {
sb.append("RangerSharedResource={");
super.toString(sb);
sb.append("name").append(name).append("} ")
.append("dataShareId={").append(dataShareId).append("} ")
.append("resource={").append(resource).append("} ")
.append("subResource={").append(subResource).append("} ")
.append("subResourceType={").append(subResourceType).append("} ")
.append("conditionExpr={").append(conditionExpr).append("} ")
.append("accessTypes={").append(accessTypes).append("} ")
.append("rowFilterInfo={").append(rowFilter).append("} ")
.append("subResourceMasks={").append(subResourceMasks).append("} ")
.append("profiles={").append(profiles).append("} ")
.append("}");
return sb;
}
}
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public static class RangerGdsMaskInfo implements java.io.Serializable {
private static final long serialVersionUID = 1L;
private List<String> values;
private RangerPolicyItemDataMaskInfo maskInfo;
public List<String> getValues() {
return values;
}
public void setValues(List<String> values) {
this.values = values;
}
public RangerPolicyItemDataMaskInfo getMaskInfo() {
return maskInfo;
}
public void setMaskInfo(RangerPolicyItemDataMaskInfo maskInfo) {
this.maskInfo = maskInfo;
}
@Override
public String toString() {
return toString(new StringBuilder()).toString();
}
public StringBuilder toString(StringBuilder sb) {
sb.append("RangerGdsMaskInfo={")
.append("values=").append(values).append(" ")
.append("maskInfo=").append(maskInfo)
.append("}");
return sb;
}
}
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public static class RangerDataShareInDataset extends RangerGdsBaseModelObject implements java.io.Serializable {
private static final long serialVersionUID = 1L;
private Long dataShareId;
private Long datasetId;
private GdsShareStatus status;
private RangerValiditySchedule validitySchedule;
private Set<String> profiles;
private String approver;
public RangerDataShareInDataset() {
}
public Long getDataShareId() {
return dataShareId;
}
public void setDataShareId(Long dataShareId) {
this.dataShareId = dataShareId;
}
public Long getDatasetId() {
return datasetId;
}
public void setDatasetId(Long datasetId) {
this.datasetId = datasetId;
}
public GdsShareStatus getStatus() {
return status;
}
public void setStatus(GdsShareStatus status) {
this.status = status;
}
public RangerValiditySchedule getValiditySchedule() {
return validitySchedule;
}
public void setValiditySchedule(RangerValiditySchedule validitySchedule) {
this.validitySchedule = validitySchedule;
}
public Set<String> getProfiles() {
return profiles;
}
public void setProfiles(Set<String> profiles) {
this.profiles = profiles;
}
public String getApprover() {
return approver;
}
public void setApprover(String approver) {
this.approver = approver;
}
@Override
public StringBuilder toString(StringBuilder sb) {
sb.append("RangerDataShareInDataset={");
super.toString(sb);
sb.append("dataShareId={").append(dataShareId).append("} ")
.append("datasetId={").append(datasetId).append("} ")
.append("status={").append(status).append("} ")
.append("validitySchedule={").append(validitySchedule).append("} ")
.append("profiles={").append(profiles).append("} ")
.append("approver={").append(approver).append("} ")
.append("}");
return sb;
}
}
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public static class RangerDatasetInProject extends RangerGdsBaseModelObject implements java.io.Serializable {
private static final long serialVersionUID = 1L;
private Long datasetId;
private Long projectId;
private GdsShareStatus status;
private RangerValiditySchedule validitySchedule;
private Set<String> profiles;
private String approver;
public RangerDatasetInProject() {
}
public Long getDatasetId() {
return datasetId;
}
public void setDatasetId(Long datasetId) {
this.datasetId = datasetId;
}
public Long getProjectId() {
return projectId;
}
public void setProjectId(Long projectId) {
this.projectId = projectId;
}
public GdsShareStatus getStatus() {
return status;
}
public void setStatus(GdsShareStatus status) {
this.status = status;
}
public RangerValiditySchedule getValiditySchedule() {
return validitySchedule;
}
public void setValiditySchedule(RangerValiditySchedule validitySchedule) {
this.validitySchedule = validitySchedule;
}
public Set<String> getProfiles() {
return profiles;
}
public void setProfiles(Set<String> profiles) {
this.profiles = profiles;
}
public String getApprover() {
return approver;
}
public void setApprover(String approver) {
this.approver = approver;
}
@Override
public StringBuilder toString(StringBuilder sb) {
sb.append("RangerDatasetInProject={");
super.toString(sb);
sb.append("datasetGuid={").append(datasetId).append("} ")
.append("projectGuid={").append(projectId).append("} ")
.append("status={").append(status).append("} ")
.append("validitySchedule={").append(validitySchedule).append("} ")
.append("profiles={").append(profiles).append("} ")
.append("approver={").append(approver).append("} ")
.append("}");
return sb;
}
}
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public static class RangerGdsObjectACL implements java.io.Serializable {
private static final long serialVersionUID = 1L;
private Map<String, GdsPermission> users;
private Map<String, GdsPermission> groups;
private Map<String, GdsPermission> roles;
public RangerGdsObjectACL() {
}
public Map<String, GdsPermission> getUsers() {
return users;
}
public void setUsers(Map<String, GdsPermission> users) {
this.users = users;
}
public Map<String, GdsPermission> getGroups() {
return groups;
}
public void setGroups(Map<String, GdsPermission> groups) {
this.groups = groups;
}
public Map<String, GdsPermission> getRoles() {
return roles;
}
public void setRoles(Map<String, GdsPermission> roles) {
this.roles = roles;
}
@Override
public String toString() {
return toString(new StringBuilder()).toString();
}
public StringBuilder toString(StringBuilder sb) {
sb.append("RangerGdsObjectACL={");
sb.append("users={").append(users).append("} ")
.append("groups={").append(groups).append("} ")
.append("roles={").append(roles).append("} ")
.append("}");
return sb;
}
}
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
public static class DatasetSummary extends RangerBaseModelObject implements java.io.Serializable {
private static final long serialVersionUID = 1L;
private String name;
private String description;
private GdsPermission permissionForCaller;
private Map<PrincipalType, Integer> principalsCount;
private Map<PrincipalType, Integer> aclPrincipalsCount;
private Long projectsCount;
private Long totalResourceCount;
private List<DataShareInDatasetSummary> dataShares;
private RangerValiditySchedule validitySchedule;
private List<String> labels;
private List<String> keywords;
public DatasetSummary() {
super();
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public GdsPermission getPermissionForCaller() {
return permissionForCaller;
}
public void setPermissionForCaller(GdsPermission permissionForCaller) {
this.permissionForCaller = permissionForCaller;
}
public Map<PrincipalType, Integer> getPrincipalsCount() {
return principalsCount;
}
public void setPrincipalsCount(Map<PrincipalType, Integer> principalsCount) {
this.principalsCount = principalsCount;
}
public Long getProjectsCount() {
return projectsCount;
}
public void setProjectsCount(Long projectsCount) {
this.projectsCount = projectsCount;
}
public Long getTotalResourceCount() {
return totalResourceCount;
}
public void setTotalResourceCount(Long totalResourceCount) {
this.totalResourceCount = totalResourceCount;
}
public List<DataShareInDatasetSummary> getDataShares() {
return dataShares;
}
public void setDataShares(List<DataShareInDatasetSummary> dataShares) {
this.dataShares = dataShares;
}
public Map<PrincipalType, Integer> getAclPrincipalsCount() {
return aclPrincipalsCount;
}
public void setAclPrincipalsCount(Map<PrincipalType, Integer> aclPrincipalsCount) {
this.aclPrincipalsCount = aclPrincipalsCount;
}
public RangerValiditySchedule getValiditySchedule() {
return validitySchedule;
}
public void setValiditySchedule(RangerValiditySchedule validitySchedule) {
this.validitySchedule = validitySchedule;
}
public List<String> getLabels() {
return labels;
}
public void setLabels(List<String> labels) {
this.labels = labels;
}
public List<String> getKeywords() {
return keywords;
}
public void setKeywords(List<String> keywords) {
this.keywords = keywords;
}
@Override
public String toString() {
return toString(new StringBuilder()).toString();
}
public StringBuilder toString(StringBuilder sb) {
sb.append("DatasetSummary={");
super.toString(sb);
sb.append("name={").append(name).append("} ")
.append("description={").append(description).append("} ")
.append("permissionForCaller={").append(permissionForCaller).append("} ")
.append("principalsCount={").append(principalsCount).append("} ")
.append("projectsCount={").append(projectsCount).append("} ")
.append("aclPrincipalsCount={").append(aclPrincipalsCount).append("} ")
.append("totalResourceCount={").append(totalResourceCount).append("} ")
.append("dataShares={").append(dataShares).append("} ")
.append("validitySchedule={").append(validitySchedule).append("} ")
.append("labels={").append(labels).append("} ")
.append("keywords={").append(keywords).append("} ")
.append("}");
return sb;
}
}
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
public static class DatasetsSummary extends PList<DatasetSummary> {
private static final long serialVersionUID = 1L;
private Map<String, Map<String, Integer>> additionalInfo;
public DatasetsSummary() {
super();
}
public DatasetsSummary(PList<DatasetSummary> datasetSummary, Map<String, Map<String, Integer>> additionalInfo) {
super(datasetSummary);
this.additionalInfo = (additionalInfo != null) ? additionalInfo : Collections.emptyMap();
}
public Map<String, Map<String, Integer>> getAdditionalInfo() {
return additionalInfo;
}
public void setAdditionalInfo(Map<String, Map<String, Integer>> additionalInfo) {
this.additionalInfo = additionalInfo;
}
@Override
public String toString() {
return toString(new StringBuilder()).toString();
}
public StringBuilder toString(StringBuilder sb) {
sb.append("DatasetsSummary={")
.append("list={").append(this.list).append("} ")
.append("additionalInfo={").append(additionalInfo).append("} ")
.append("}");
return sb;
}
}
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
public static class DataShareSummary extends RangerBaseModelObject implements java.io.Serializable {
private static final long serialVersionUID = 1L;
private String name;
private String description;
private GdsPermission permissionForCaller;
private Long resourceCount;
private Long serviceId;
private String serviceName;
private String serviceType;
private Long zoneId;
private String zoneName;
private List<DataShareInDatasetSummary> datasets;
public DataShareSummary() {
super();
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public GdsPermission getPermissionForCaller() {
return permissionForCaller;
}
public void setPermissionForCaller(GdsPermission permissionForCaller) {
this.permissionForCaller = permissionForCaller;
}
public Long getResourceCount() {
return resourceCount;
}
public void setResourceCount(Long resourceCount) {
this.resourceCount = resourceCount;
}
public Long getServiceId() {
return serviceId;
}
public void setServiceId(Long serviceId) {
this.serviceId = serviceId;
}
public String getServiceName() {
return serviceName;
}
public void setServiceName(String serviceName) {
this.serviceName = serviceName;
}
public String getServiceType() {
return serviceType;
}
public void setServiceType(String serviceType) {
this.serviceType = serviceType;
}
public Long getZoneId() {
return zoneId;
}
public void setZoneId(Long zoneId) {
this.zoneId = zoneId;
}
public String getZoneName() {
return zoneName;
}
public void setZoneName(String zoneName) {
this.zoneName = zoneName;
}
public List<DataShareInDatasetSummary> getDatasets() {
return datasets;
}
public void setDatasets(List<DataShareInDatasetSummary> datasets) {
this.datasets = datasets;
}
@Override
public String toString() {
return toString(new StringBuilder()).toString();
}
public StringBuilder toString(StringBuilder sb) {
sb.append("DataShareSummary={");
super.toString(sb);
sb.append("name={").append(name).append("} ")
.append("description={").append(description).append("} ")
.append("permissionForCaller={").append(permissionForCaller).append("} ")
.append("resourceCount={").append(resourceCount).append("} ")
.append("serviceId={").append(serviceId).append("} ")
.append("serviceName={").append(serviceName).append("} ")
.append("serviceType={").append(serviceType).append("} ")
.append("zoneName={").append(zoneName).append("} ")
.append("zoneId={").append(zoneId).append("} ")
.append("datasets={").append(datasets).append("} ")
.append("}");
return sb;
}
}
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
public static class DataShareInDatasetSummary extends RangerBaseModelObject implements java.io.Serializable {
private static final long serialVersionUID = 1L;
private Long datasetId;
private String datasetName;
private Long dataShareId;
private String dataShareName;
private Long serviceId;
private String serviceName;
private Long zoneId;
private String zoneName;
private Long resourceCount;
private GdsShareStatus shareStatus;
private String approver;
public DataShareInDatasetSummary() {
super();
}
public String getDatasetName() {
return datasetName;
}
public void setDatasetName(String datasetName) {
this.datasetName = datasetName;
}
public Long getDatasetId() {
return datasetId;
}
public void setDatasetId(Long datasetId) {
this.datasetId = datasetId;
}
public Long getDataShareId() {
return dataShareId;
}
public void setDataShareId(Long dataShareId) {
this.dataShareId = dataShareId;
}
public String getDataShareName() {
return dataShareName;
}
public void setDataShareName(String dataShareName) {
this.dataShareName = dataShareName;
}
public Long getServiceId() {
return serviceId;
}
public void setServiceId(Long serviceId) {
this.serviceId = serviceId;
}
public String getServiceName() {
return serviceName;
}
public void setServiceName(String serviceName) {
this.serviceName = serviceName;
}
public Long getZoneId() {
return zoneId;
}
public void setZoneId(Long zoneId) {
this.zoneId = zoneId;
}
public String getZoneName() {
return zoneName;
}
public void setZoneName(String zoneName) {
this.zoneName = zoneName;
}
public Long getResourceCount() {
return resourceCount;
}
public void setResourceCount(Long resourceCount) {
this.resourceCount = resourceCount;
}
public GdsShareStatus getShareStatus() {
return shareStatus;
}
public void setShareStatus(GdsShareStatus shareStatus) {
this.shareStatus = shareStatus;
}
public String getApprover() {
return approver;
}
public void setApprover(String approver) {
this.approver = approver;
}
@Override
public String toString() {
return toString(new StringBuilder()).toString();
}
public StringBuilder toString(StringBuilder sb) {
sb.append("DataShareInDatasetSummary={");
super.toString(sb);
sb.append("name={").append(datasetName).append("} ")
.append("datasetId={").append(datasetId).append("} ")
.append("datasetName={").append(datasetName).append("} ")
.append("dataShareId={").append(dataShareId).append("} ")
.append("dataShareName={").append(dataShareName).append("} ")
.append("serviceId={").append(serviceId).append("} ")
.append("serviceName={").append(serviceName).append("} ")
.append("zoneId={").append(zoneId).append("} ")
.append("zoneName={").append(zoneName).append("} ")
.append("resourceCount={").append(resourceCount).append("} ")
.append("shareStatus={").append(shareStatus).append("} ")
.append("approver={").append(approver).append("} ")
.append("}");
return sb;
}
}
}
|
googleapis/google-cloud-java | 38,124 | java-discoveryengine/proto-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/CreateControlRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1/control_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1;
/**
*
*
* <pre>
* Request for CreateControl method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.CreateControlRequest}
*/
public final class CreateControlRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.CreateControlRequest)
CreateControlRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateControlRequest.newBuilder() to construct.
private CreateControlRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateControlRequest() {
parent_ = "";
controlId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateControlRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1.ControlServiceProto
.internal_static_google_cloud_discoveryengine_v1_CreateControlRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1.ControlServiceProto
.internal_static_google_cloud_discoveryengine_v1_CreateControlRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1.CreateControlRequest.class,
com.google.cloud.discoveryengine.v1.CreateControlRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Full resource name of parent data store. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Full resource name of parent data store. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CONTROL_FIELD_NUMBER = 2;
private com.google.cloud.discoveryengine.v1.Control control_;
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>
* .google.cloud.discoveryengine.v1.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the control field is set.
*/
@java.lang.Override
public boolean hasControl() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>
* .google.cloud.discoveryengine.v1.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The control.
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1.Control getControl() {
return control_ == null
? com.google.cloud.discoveryengine.v1.Control.getDefaultInstance()
: control_;
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>
* .google.cloud.discoveryengine.v1.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ControlOrBuilder getControlOrBuilder() {
return control_ == null
? com.google.cloud.discoveryengine.v1.Control.getDefaultInstance()
: control_;
}
public static final int CONTROL_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object controlId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value must be within 1-63 characters.
* Valid characters are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The controlId.
*/
@java.lang.Override
public java.lang.String getControlId() {
java.lang.Object ref = controlId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
controlId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value must be within 1-63 characters.
* Valid characters are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for controlId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getControlIdBytes() {
java.lang.Object ref = controlId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
controlId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getControl());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(controlId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, controlId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getControl());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(controlId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, controlId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.discoveryengine.v1.CreateControlRequest)) {
return super.equals(obj);
}
com.google.cloud.discoveryengine.v1.CreateControlRequest other =
(com.google.cloud.discoveryengine.v1.CreateControlRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (hasControl() != other.hasControl()) return false;
if (hasControl()) {
if (!getControl().equals(other.getControl())) return false;
}
if (!getControlId().equals(other.getControlId())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasControl()) {
hash = (37 * hash) + CONTROL_FIELD_NUMBER;
hash = (53 * hash) + getControl().hashCode();
}
hash = (37 * hash) + CONTROL_ID_FIELD_NUMBER;
hash = (53 * hash) + getControlId().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.discoveryengine.v1.CreateControlRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.CreateControlRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.CreateControlRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.CreateControlRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.CreateControlRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.CreateControlRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.CreateControlRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.CreateControlRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.CreateControlRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.CreateControlRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.CreateControlRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.CreateControlRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.discoveryengine.v1.CreateControlRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for CreateControl method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.CreateControlRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.CreateControlRequest)
com.google.cloud.discoveryengine.v1.CreateControlRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1.ControlServiceProto
.internal_static_google_cloud_discoveryengine_v1_CreateControlRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1.ControlServiceProto
.internal_static_google_cloud_discoveryengine_v1_CreateControlRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1.CreateControlRequest.class,
com.google.cloud.discoveryengine.v1.CreateControlRequest.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1.CreateControlRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getControlFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
control_ = null;
if (controlBuilder_ != null) {
controlBuilder_.dispose();
controlBuilder_ = null;
}
controlId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1.ControlServiceProto
.internal_static_google_cloud_discoveryengine_v1_CreateControlRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.CreateControlRequest getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1.CreateControlRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.CreateControlRequest build() {
com.google.cloud.discoveryengine.v1.CreateControlRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.CreateControlRequest buildPartial() {
com.google.cloud.discoveryengine.v1.CreateControlRequest result =
new com.google.cloud.discoveryengine.v1.CreateControlRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.discoveryengine.v1.CreateControlRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.control_ = controlBuilder_ == null ? control_ : controlBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.controlId_ = controlId_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1.CreateControlRequest) {
return mergeFrom((com.google.cloud.discoveryengine.v1.CreateControlRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.discoveryengine.v1.CreateControlRequest other) {
if (other == com.google.cloud.discoveryengine.v1.CreateControlRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasControl()) {
mergeControl(other.getControl());
}
if (!other.getControlId().isEmpty()) {
controlId_ = other.controlId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getControlFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
controlId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Full resource name of parent data store. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Full resource name of parent data store. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Full resource name of parent data store. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Full resource name of parent data store. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Full resource name of parent data store. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.discoveryengine.v1.Control control_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.discoveryengine.v1.Control,
com.google.cloud.discoveryengine.v1.Control.Builder,
com.google.cloud.discoveryengine.v1.ControlOrBuilder>
controlBuilder_;
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>
* .google.cloud.discoveryengine.v1.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the control field is set.
*/
public boolean hasControl() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>
* .google.cloud.discoveryengine.v1.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The control.
*/
public com.google.cloud.discoveryengine.v1.Control getControl() {
if (controlBuilder_ == null) {
return control_ == null
? com.google.cloud.discoveryengine.v1.Control.getDefaultInstance()
: control_;
} else {
return controlBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>
* .google.cloud.discoveryengine.v1.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setControl(com.google.cloud.discoveryengine.v1.Control value) {
if (controlBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
control_ = value;
} else {
controlBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>
* .google.cloud.discoveryengine.v1.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setControl(com.google.cloud.discoveryengine.v1.Control.Builder builderForValue) {
if (controlBuilder_ == null) {
control_ = builderForValue.build();
} else {
controlBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>
* .google.cloud.discoveryengine.v1.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeControl(com.google.cloud.discoveryengine.v1.Control value) {
if (controlBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& control_ != null
&& control_ != com.google.cloud.discoveryengine.v1.Control.getDefaultInstance()) {
getControlBuilder().mergeFrom(value);
} else {
control_ = value;
}
} else {
controlBuilder_.mergeFrom(value);
}
if (control_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>
* .google.cloud.discoveryengine.v1.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearControl() {
bitField0_ = (bitField0_ & ~0x00000002);
control_ = null;
if (controlBuilder_ != null) {
controlBuilder_.dispose();
controlBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>
* .google.cloud.discoveryengine.v1.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.discoveryengine.v1.Control.Builder getControlBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getControlFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>
* .google.cloud.discoveryengine.v1.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.discoveryengine.v1.ControlOrBuilder getControlOrBuilder() {
if (controlBuilder_ != null) {
return controlBuilder_.getMessageOrBuilder();
} else {
return control_ == null
? com.google.cloud.discoveryengine.v1.Control.getDefaultInstance()
: control_;
}
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>
* .google.cloud.discoveryengine.v1.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.discoveryengine.v1.Control,
com.google.cloud.discoveryengine.v1.Control.Builder,
com.google.cloud.discoveryengine.v1.ControlOrBuilder>
getControlFieldBuilder() {
if (controlBuilder_ == null) {
controlBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.discoveryengine.v1.Control,
com.google.cloud.discoveryengine.v1.Control.Builder,
com.google.cloud.discoveryengine.v1.ControlOrBuilder>(
getControl(), getParentForChildren(), isClean());
control_ = null;
}
return controlBuilder_;
}
private java.lang.Object controlId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value must be within 1-63 characters.
* Valid characters are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The controlId.
*/
public java.lang.String getControlId() {
java.lang.Object ref = controlId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
controlId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value must be within 1-63 characters.
* Valid characters are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for controlId.
*/
public com.google.protobuf.ByteString getControlIdBytes() {
java.lang.Object ref = controlId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
controlId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value must be within 1-63 characters.
* Valid characters are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The controlId to set.
* @return This builder for chaining.
*/
public Builder setControlId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
controlId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value must be within 1-63 characters.
* Valid characters are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearControlId() {
controlId_ = getDefaultInstance().getControlId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value must be within 1-63 characters.
* Valid characters are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for controlId to set.
* @return This builder for chaining.
*/
public Builder setControlIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
controlId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.CreateControlRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.CreateControlRequest)
private static final com.google.cloud.discoveryengine.v1.CreateControlRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.CreateControlRequest();
}
public static com.google.cloud.discoveryengine.v1.CreateControlRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateControlRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateControlRequest>() {
@java.lang.Override
public CreateControlRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateControlRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateControlRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.CreateControlRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/hive | 38,308 | llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.llap.daemon.impl;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryPoolMXBean;
import java.lang.management.MemoryType;
import java.net.InetSocketAddress;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import javax.management.ObjectName;
import javax.net.SocketFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.JvmPauseMonitor;
import org.apache.hadoop.hive.common.LogUtils;
import org.apache.hadoop.hive.common.OTELUtils;
import org.apache.hadoop.hive.conf.Constants;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.llap.DaemonId;
import org.apache.hadoop.hive.llap.LlapDaemonInfo;
import org.apache.hadoop.hive.llap.LlapOutputFormatService;
import org.apache.hadoop.hive.llap.LlapUgiManager;
import org.apache.hadoop.hive.llap.LlapUtil;
import org.apache.hadoop.hive.llap.configuration.LlapDaemonConfiguration;
import org.apache.hadoop.hive.llap.daemon.ContainerRunner;
import org.apache.hadoop.hive.llap.daemon.QueryFailedHandler;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentResponseProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SetCapacityRequestProto;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SetCapacityResponseProto;
import org.apache.hadoop.hive.llap.daemon.services.impl.LlapWebServices;
import org.apache.hadoop.hive.llap.io.api.LlapProxy;
import org.apache.hadoop.hive.llap.metrics.LLAPOTELExporter;
import org.apache.hadoop.hive.llap.metrics.LlapDaemonExecutorMetrics;
import org.apache.hadoop.hive.llap.metrics.LlapDaemonJvmMetrics;
import org.apache.hadoop.hive.llap.metrics.LlapMetricsSystem;
import org.apache.hadoop.hive.llap.metrics.MetricsUtils;
import org.apache.hadoop.hive.llap.registry.impl.LlapRegistryService;
import org.apache.hadoop.hive.llap.security.LlapExtClientJwtHelper;
import org.apache.hadoop.hive.llap.security.SecretManager;
import org.apache.hadoop.hive.llap.shufflehandler.ShuffleHandler;
import org.apache.hadoop.hive.ql.ServiceContext;
import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge.UdfWhitelistChecker;
import org.apache.hadoop.metrics2.util.MBeans;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hive.common.util.HiveVersionInfo;
import org.apache.hive.common.util.ShutdownHookManager;
import org.apache.logging.log4j.core.config.Configurator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Ints;
public class LlapDaemon extends CompositeService implements ContainerRunner, LlapDaemonMXBean {
private static final Logger LOG = LoggerFactory.getLogger(LlapDaemon.class);
// Copy of the daemon conf specialized for the shuffle handler (port, local dirs, dir watcher).
private final Configuration shuffleHandlerConf;
// Token secret manager; null when Kerberos security is disabled.
private final SecretManager secretManager;
// RPC endpoints and the fragment execution engine.
private final LlapProtocolServerImpl server;
private final ContainerRunnerImpl containerRunner;
// Reports fragment status back to Tez AMs.
private final AMReporter amReporter;
// Service-discovery registration (ZooKeeper-backed); started manually in serviceStart().
private final LlapRegistryService registry;
// Web UI; null when disabled (e.g. default port in test mode).
private final LlapWebServices webServices;
// Synthetic load generator used only when HIVE_TEST_LOAD_HOSTNAMES is set; null otherwise.
private final LlapLoadGeneratorService llapLoadGeneratorService;
// Total number of submitWork() calls, exposed for tests.
private final AtomicLong numSubmissions = new AtomicLong(0);
private final JvmPauseMonitor pauseMonitor;
// JMX registration handle for this daemon's MXBean.
private final ObjectName llapDaemonInfoBean;
private final LlapDaemonExecutorMetrics metrics;
// Localizer for permanent UDF jars; null when download of permanent fns is disabled.
private final FunctionLocalizer fnLocalizer;
// Parameters used for JMX
private final boolean llapIoEnabled;
private final long executorMemoryPerInstance;
private final long ioMemoryPerInstance;
private final int numExecutors;
private final long maxJvmMemory;
private final String[] localDirs;
private final DaemonId daemonId;
private final SocketFactory socketFactory;
private final LlapTokenManager llapTokenManager;
// OTEL metrics exporter thread; created in serviceStart() only when the frequency is > 0.
private LLAPOTELExporter otelExporter = null;
// TODO Not the best way to share the address
// Bound addresses are published via these references once the RPC server starts.
private final AtomicReference<InetSocketAddress> srvAddress = new AtomicReference<>(),
mngAddress = new AtomicReference<>();
// Actual shuffle port, known only after ShuffleHandler starts.
private final AtomicReference<Integer> shufflePort = new AtomicReference<>();
/**
 * Creates (but does not start) an LLAP daemon: validates configuration, logs in via Kerberos
 * when security is enabled, sizes memory, and wires up all child services. Services are only
 * initialized/started later via the CompositeService lifecycle (init()/start()).
 * Port arguments may be 0 to request automatic selection.
 */
public LlapDaemon(Configuration daemonConf, int numExecutors, long executorMemoryBytes,
boolean ioEnabled, boolean isDirectCache, long ioMemoryBytes, String[] localDirs, int srvPort,
boolean externalClientCloudSetupEnabled, int externalClientsRpcPort,
int mngPort, int shufflePort, int webPort, String appName) {
super("LlapDaemon");
printAsciiArt();
// --- Fail fast on invalid configuration. ---
Preconditions.checkArgument(numExecutors > 0);
Preconditions.checkArgument(srvPort == 0 || (srvPort > 1024 && srvPort < 65536),
"Server RPC Port must be between 1025 and 65535, or 0 automatic selection");
if (externalClientCloudSetupEnabled) {
Preconditions.checkArgument(
externalClientsRpcPort == 0 || (externalClientsRpcPort > 1024 && externalClientsRpcPort < 65536),
"Server RPC port for external clients must be between 1025 and 65535, or 0 automatic selection");
}
Preconditions.checkArgument(mngPort == 0 || (mngPort > 1024 && mngPort < 65536),
"Management RPC Port must be between 1025 and 65535, or 0 automatic selection");
Preconditions.checkArgument(localDirs != null && localDirs.length > 0,
"Work dirs must be specified");
Preconditions.checkArgument(shufflePort == 0 || (shufflePort > 1024 && shufflePort < 65536),
"Shuffle Port must be between 1024 and 65535, or 0 for automatic selection");
int outputFormatServicePort = HiveConf.getIntVar(daemonConf, HiveConf.ConfVars.LLAP_DAEMON_OUTPUT_SERVICE_PORT);
Preconditions.checkArgument(outputFormatServicePort == 0
|| (outputFormatServicePort > 1024 && outputFormatServicePort < 65536),
"OutputFormatService Port must be between 1024 and 65535, or 0 for automatic selection");
// A service-hosts value starting with '@' means ZooKeeper-based discovery, which needs a quorum.
String hosts = HiveConf.getTrimmedVar(daemonConf, ConfVars.LLAP_DAEMON_SERVICE_HOSTS);
if (hosts.startsWith("@")) {
String zkHosts = HiveConf.getTrimmedVar(daemonConf, ConfVars.HIVE_ZOOKEEPER_QUORUM);
LOG.info("Zookeeper Quorum: {}", zkHosts);
Preconditions.checkArgument(zkHosts != null && !zkHosts.trim().isEmpty(),
"LLAP service hosts startswith '@' but hive.zookeeper.quorum is not set." +
" hive.zookeeper.quorum must be set.");
}
String hostName = MetricsUtils.getHostName();
try {
// re-login with kerberos. This makes sure all daemons have the same login user.
if (UserGroupInformation.isSecurityEnabled()) {
final String daemonPrincipal = HiveConf.getVar(daemonConf, ConfVars.LLAP_KERBEROS_PRINCIPAL);
final String daemonKeytab = HiveConf.getVar(daemonConf, ConfVars.LLAP_KERBEROS_KEYTAB_FILE);
LlapUtil.loginWithKerberosAndUpdateCurrentUser(daemonPrincipal, daemonKeytab);
}
String currentUser = UserGroupInformation.getCurrentUser().getShortUserName();
LOG.info("Starting daemon as user: {}", currentUser);
daemonId = new DaemonId(currentUser, LlapUtil.generateClusterName(daemonConf),
hostName, appName, System.currentTimeMillis());
} catch (IOException ex) {
throw new RuntimeException(ex);
}
// --- Memory sizing: executor memory is reduced by a configurable Xmx headroom. ---
this.maxJvmMemory = getTotalHeapSize();
this.llapIoEnabled = ioEnabled;
long xmxHeadRoomBytes = determineXmxHeadroom(daemonConf, executorMemoryBytes, maxJvmMemory);
this.executorMemoryPerInstance = executorMemoryBytes - xmxHeadRoomBytes;
this.ioMemoryPerInstance = ioMemoryBytes;
this.numExecutors = numExecutors;
this.localDirs = localDirs;
// --- Scheduler and metrics-window settings. ---
int waitQueueSize = HiveConf.getIntVar(
daemonConf, ConfVars.LLAP_DAEMON_TASK_SCHEDULER_WAIT_QUEUE_SIZE);
boolean enablePreemption = HiveConf.getBoolVar(
daemonConf, ConfVars.LLAP_DAEMON_TASK_SCHEDULER_ENABLE_PREEMPTION);
int timedWindowAverageDataPoints = HiveConf.getIntVar(
daemonConf, ConfVars.LLAP_DAEMON_METRICS_TIMED_WINDOW_AVERAGE_DATA_POINTS);
long timedWindowAverageWindowLength = HiveConf.getTimeVar(
daemonConf, ConfVars.LLAP_DAEMON_METRICS_TIMED_WINDOW_AVERAGE_WINDOW_LENGTH, TimeUnit.NANOSECONDS);
int simpleAverageWindowDataSize = HiveConf.getIntVar(
daemonConf, ConfVars.LLAP_DAEMON_METRICS_SIMPLE_AVERAGE_DATA_POINTS);
Preconditions.checkArgument(timedWindowAverageDataPoints >= 0,
"hive.llap.daemon.metrics.timed.window.average.data.points should be greater or equal to 0");
Preconditions.checkArgument(timedWindowAverageDataPoints == 0 || timedWindowAverageWindowLength > 0,
"hive.llap.daemon.metrics.timed.window.average.window.length should be greater than 0 if " +
"hive.llap.daemon.metrics.average.timed.window.data.points is set fo greater than 0");
Preconditions.checkArgument(simpleAverageWindowDataSize >= 0,
"hive.llap.daemon.metrics.simple.average.data.points should be greater or equal to 0");
if (ioEnabled) {
int numThreads = HiveConf.getIntVar(daemonConf, HiveConf.ConfVars.LLAP_IO_THREADPOOL_SIZE);
Preconditions.checkArgument(numThreads >= numExecutors,
"hive.llap.io.threadpool.size (%s) should be greater or equal to hive.llap.daemon.num.executors (%s)",
numThreads, numExecutors);
}
// Log the effective configuration both via the logger and on stderr (see below).
final String logMsg = "Attempting to start LlapDaemon with the following configuration: " +
"maxJvmMemory=" + maxJvmMemory + " ("
+ LlapUtil.humanReadableByteCount(maxJvmMemory) + ")" +
", requestedExecutorMemory=" + executorMemoryBytes +
" (" + LlapUtil.humanReadableByteCount(executorMemoryBytes) + ")" +
", llapIoCacheSize=" + ioMemoryBytes + " ("
+ LlapUtil.humanReadableByteCount(ioMemoryBytes) + ")" +
", xmxHeadRoomMemory=" + xmxHeadRoomBytes + " ("
+ LlapUtil.humanReadableByteCount(xmxHeadRoomBytes) + ")" +
", adjustedExecutorMemory=" + executorMemoryPerInstance +
" (" + LlapUtil.humanReadableByteCount(executorMemoryPerInstance) + ")" +
", numExecutors=" + numExecutors +
", llapIoEnabled=" + ioEnabled +
", llapIoCacheIsDirect=" + isDirectCache +
", rpcListenerPort=" + srvPort +
", externalClientCloudSetupEnabled=" + externalClientCloudSetupEnabled +
", rpcListenerPortForExternalClients=" + externalClientsRpcPort +
", mngListenerPort=" + mngPort +
", webPort=" + webPort +
", outputFormatSvcPort=" + outputFormatServicePort +
", workDirs=" + Arrays.toString(localDirs) +
", shufflePort=" + shufflePort +
", waitQueueSize= " + waitQueueSize +
", enablePreemption= " + enablePreemption +
", timedWindowAverageDataPoints= " + timedWindowAverageDataPoints +
", timedWindowAverageWindowLength= " + timedWindowAverageWindowLength +
", simpleAverageWindowDataSize= " + simpleAverageWindowDataSize +
", versionInfo= (" + HiveVersionInfo.getBuildVersion() + ")";
LOG.info(logMsg);
final String currTSISO8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ").format(new Date());
// Time based log retrieval may not fetch the above log line so logging to stderr for debugging purpose.
System.err.println(currTSISO8601 + " " + logMsg);
// Direct-cache memory lives off-heap, so it only counts against Xmx when the cache is on-heap.
long memRequired =
executorMemoryBytes + (ioEnabled && isDirectCache == false ? ioMemoryBytes : 0);
// TODO: this check is somewhat bogus as the maxJvmMemory != Xmx parameters (see annotation in LlapServiceDriver)
Preconditions.checkState(maxJvmMemory >= memRequired,
"Invalid configuration. Xmx value too small. maxAvailable=" + LlapUtil.humanReadableByteCount(maxJvmMemory) +
", configured(exec + io if enabled)=" + LlapUtil.humanReadableByteCount(memRequired));
// --- Shuffle handler configuration (started later in serviceStart()). ---
this.shuffleHandlerConf = new Configuration(daemonConf);
this.shuffleHandlerConf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, shufflePort);
this.shuffleHandlerConf.set(ShuffleHandler.SHUFFLE_HANDLER_LOCAL_DIRS,
StringUtils.arrayToString(localDirs));
this.shuffleHandlerConf.setBoolean(ShuffleHandler.SHUFFLE_DIR_WATCHER_ENABLED,
HiveConf.getBoolVar(daemonConf, ConfVars.LLAP_DAEMON_SHUFFLE_DIR_WATCHER_ENABLED));
// Less frequently set parameter, not passing in as a param.
int numHandlers = HiveConf.getIntVar(daemonConf, ConfVars.LLAP_DAEMON_RPC_NUM_HANDLERS);
// Initialize the function localizer.
ClassLoader executorClassLoader = null;
if (HiveConf.getBoolVar(daemonConf, ConfVars.LLAP_DAEMON_DOWNLOAD_PERMANENT_FNS)) {
this.fnLocalizer = new FunctionLocalizer(daemonConf, localDirs[0]);
executorClassLoader = fnLocalizer.getClassLoader();
// Set up the hook that will disallow creating non-whitelisted UDFs anywhere in the plan.
// We are not using a specific hook for GenericUDFBridge - that doesn't work in MiniLlap
// because the daemon is embedded, so the client also gets this hook and Kryo is brittle.
SerializationUtilities.setGlobalHook(new LlapGlobalUdfChecker(fnLocalizer));
} else {
this.fnLocalizer = null;
SerializationUtilities.setGlobalHook(new LlapGlobalUdfChecker(new StaticPermanentFunctionChecker(daemonConf)));
executorClassLoader = Thread.currentThread().getContextClassLoader();
}
// Initialize the metrics system
LlapMetricsSystem.initialize("LlapDaemon");
this.pauseMonitor = new JvmPauseMonitor(daemonConf);
pauseMonitor.start();
String displayNameJvm = "LlapDaemonJvmMetrics-" + hostName;
String sessionId = MetricsUtils.getUUID();
LlapDaemonJvmMetrics.create(displayNameJvm, sessionId, daemonConf);
String displayName = "LlapDaemonExecutorMetrics-" + hostName;
daemonConf.set("llap.daemon.metrics.sessionid", sessionId);
// Parse pre-emption metrics intervals, silently skipping values that are not integers.
String[] strIntervals = HiveConf.getTrimmedStringsVar(daemonConf,
HiveConf.ConfVars.LLAP_DAEMON_TASK_PREEMPTION_METRICS_INTERVALS);
List<Integer> intervalList = new ArrayList<>();
if (strIntervals != null) {
for (String strInterval : strIntervals) {
try {
intervalList.add(Integer.valueOf(strInterval));
} catch (NumberFormatException e) {
LOG.warn("Ignoring task pre-emption metrics interval {} from {} as it is invalid",
strInterval, Arrays.toString(strIntervals));
}
}
}
this.metrics = LlapDaemonExecutorMetrics.create(displayName, sessionId, numExecutors, waitQueueSize,
Ints.toArray(intervalList), timedWindowAverageDataPoints, timedWindowAverageWindowLength,
simpleAverageWindowDataSize);
this.metrics.setMemoryPerInstance(executorMemoryPerInstance);
this.metrics.setCacheMemoryPerInstance(ioMemoryBytes);
this.metrics.setJvmMaxMemory(maxJvmMemory);
this.metrics.getJvmMetrics().setPauseMonitor(pauseMonitor);
this.llapDaemonInfoBean = MBeans.register("LlapDaemon", "LlapDaemonInfo", this);
LOG.info("Started LlapMetricsSystem with displayName: " + displayName +
" sessionId: " + sessionId);
// --- Wire up RPC server, AM reporter, query tracker, executor service and container runner. ---
int maxAmReporterThreads = HiveConf.getIntVar(daemonConf, ConfVars.LLAP_DAEMON_AM_REPORTER_MAX_THREADS);
this.socketFactory = NetUtils.getDefaultSocketFactory(daemonConf);
this.amReporter = new AMReporter(numExecutors, maxAmReporterThreads, srvAddress,
new QueryFailedHandlerProxy(), daemonConf, daemonId, socketFactory);
SecretManager sm = null;
if (UserGroupInformation.isSecurityEnabled()) {
sm = SecretManager.createSecretManager(daemonConf, daemonId.getClusterString());
this.llapTokenManager = new DefaultLlapTokenManager(daemonConf, sm);
} else {
this.llapTokenManager = new DummyTokenManager();
}
this.secretManager = sm;
this.server = new LlapProtocolServerImpl(secretManager, numHandlers, this, srvAddress, mngAddress, srvPort,
externalClientsRpcPort, mngPort, daemonId, metrics).withTokenManager(this.llapTokenManager);
LlapUgiManager llapUgiManager = LlapUgiManager.getInstance(daemonConf);
QueryTracker queryTracker = new QueryTracker(daemonConf, localDirs,
daemonId.getClusterString());
String waitQueueSchedulerClassName = HiveConf.getVar(
daemonConf, ConfVars.LLAP_DAEMON_WAIT_QUEUE_COMPARATOR_CLASS_NAME);
Scheduler<TaskRunnerCallable> executorService = new TaskExecutorService(numExecutors, waitQueueSize,
waitQueueSchedulerClassName, enablePreemption, executorClassLoader, metrics, null);
addIfService(queryTracker);
addIfService(executorService);
this.containerRunner = new ContainerRunnerImpl(daemonConf, numExecutors,
this.shufflePort, srvAddress, executorMemoryPerInstance, metrics,
amReporter, queryTracker, executorService, daemonId, llapUgiManager, socketFactory);
addIfService(containerRunner);
// Not adding the registry as a service, since we need to control when it is initialized - conf used to pickup properties.
this.registry = new LlapRegistryService(true);
// disable web UI in test mode until a specific port was configured
if (HiveConf.getBoolVar(daemonConf, HiveConf.ConfVars.HIVE_IN_TEST)
&& Integer.parseInt(ConfVars.LLAP_DAEMON_WEB_PORT.getDefaultValue()) == webPort) {
LOG.info("Web UI was disabled in test mode because hive.llap.daemon.web.port was not "
+ "specified or has default value ({})", webPort);
this.webServices = null;
} else {
this.webServices = new LlapWebServices(webPort, this, registry);
addIfService(webServices);
}
if (HiveConf.getVar(daemonConf, ConfVars.HIVE_TEST_LOAD_HOSTNAMES).isEmpty()) {
this.llapLoadGeneratorService = null;
} else {
this.llapLoadGeneratorService = new LlapLoadGeneratorService();
addIfService(llapLoadGeneratorService);
}
// Bring up the server only after all other components have started.
addIfService(server);
// AMReporter after the server so that it gets the correct address. It knows how to deal with
// requests before it is started.
addIfService(amReporter);
addIfService(new LocalDirCleaner(localDirs, daemonConf));
}
/**
 * Computes the Xmx headroom in bytes from {@code ConfVars.LLAP_DAEMON_XMX_HEADROOM}. The value
 * may be a percentage of the JVM max memory (e.g. "5%", must be in [0, 100)) or an absolute
 * size parsed by {@link HiveConf#toSizeBytes}. The headroom must be smaller than the requested
 * executor memory.
 *
 * @param daemonConf daemon configuration to read the headroom setting from
 * @param executorMemoryBytes total requested executor memory; the headroom is subtracted from it
 * @param maxJvmMemory detected JVM heap size, the base for percentage headroom
 * @return the headroom in bytes
 */
private static long determineXmxHeadroom(
    Configuration daemonConf, long executorMemoryBytes, long maxJvmMemory) {
  String headroomStr = HiveConf.getVar(daemonConf, ConfVars.LLAP_DAEMON_XMX_HEADROOM).trim();
  long xmxHeadRoomBytes = Long.MAX_VALUE;
  try {
    if (headroomStr.endsWith("%")) {
      long percentage = Integer.parseInt(headroomStr.substring(0, headroomStr.length() - 1));
      Preconditions.checkState(percentage >= 0 && percentage < 100,
          "Headroom percentage should be in [0, 100) range; found " + headroomStr);
      xmxHeadRoomBytes = maxJvmMemory * percentage / 100L;
    } else {
      xmxHeadRoomBytes = HiveConf.toSizeBytes(headroomStr);
    }
  } catch (NumberFormatException ex) {
    // Preserve the parse failure as the cause so the bad value's origin is diagnosable.
    throw new RuntimeException("Invalid headroom configuration " + headroomStr, ex);
  }
  Preconditions.checkArgument(xmxHeadRoomBytes < executorMemoryBytes,
      "LLAP daemon headroom size should be less than daemon max memory size. headRoomBytes: "
      + xmxHeadRoomBytes + " executorMemoryBytes: " + executorMemoryBytes + " (derived from "
      + headroomStr + " out of xmx of " + maxJvmMemory + ")");
  return xmxHeadRoomBytes;
}
/**
 * Initializes Log4j2 for the daemon from a properties file found on the classpath. The file
 * name defaults to {@code LlapConstants.LOG4j2_PROPERTIES_FILE} but can be overridden via the
 * {@code LLAP_LOG4J2_PROPERTIES_FILE_NAME_ENV} environment variable.
 *
 * @param conf daemon configuration, consulted for the async-logging setting
 * @throws RuntimeException if the resolved properties file cannot be found on the classpath
 */
private static void initializeLogging(final Configuration conf) {
  long start = System.currentTimeMillis();
  String log4j2FileName = System.getenv(LlapConstants.LLAP_LOG4J2_PROPERTIES_FILE_NAME_ENV);
  if (log4j2FileName == null || log4j2FileName.isEmpty()) {
    log4j2FileName = LlapConstants.LOG4j2_PROPERTIES_FILE;
  }
  URL llap_l4j2 = LlapDaemon.class.getClassLoader().getResource(log4j2FileName);
  if (llap_l4j2 != null) {
    final boolean async = LogUtils.checkAndSetAsyncLogging(conf);
    // required for MDC based routing appender so that child threads can inherit the MDC context
    System.setProperty("isThreadContextMapInheritable", "true");
    Configurator.initialize("LlapDaemonLog4j2", llap_l4j2.toString());
    long end = System.currentTimeMillis();
    LOG.debug("LLAP daemon logging initialized from {} in {} ms. Async: {}",
        llap_l4j2, (end - start), async);
  } else {
    // Report the file name that was actually looked up; the env variable may have overridden
    // the default, and the old message always named the default file, which was misleading.
    throw new RuntimeException("Log initialization failed." +
        " Unable to locate " + log4j2FileName + " file in classpath");
  }
}
/**
 * Estimates the usable JVM heap by summing the max sizes of all HEAP memory pools, counting
 * survivor space twice (there are two survivor spaces), and rounding the total up to the next
 * whole megabyte.
 *
 * @return usable heap size in bytes, rounded up to a multiple of 1 MB
 */
public static long getTotalHeapSize() {
  // runtime.getMax() gives a very different number from the actual Xmx sizing.
  // you can iterate through the
  // http://docs.oracle.com/javase/7/docs/api/java/lang/management/MemoryPoolMXBean.html
  // from java.lang.management to figure this out, but the hard-coded params in the llap run.sh
  // result in 89% usable heap (-XX:NewRatio=8) + a survivor region which is technically not
  // in the usable space.
  long total = 0;
  for (MemoryPoolMXBean mp : ManagementFactory.getMemoryPoolMXBeans()) {
    long sz = mp.getUsage().getMax();
    if (mp.getName().contains("Survivor")) {
      sz *= 2; // there are 2 survivor spaces
    }
    if (mp.getType().equals(MemoryType.HEAP)) {
      total += sz;
    }
  }
  // Round up to the next MB. The previous code added (total % 1MB) to the total, which neither
  // rounds up nor down; this pads by exactly the amount needed to reach a 1 MB boundary.
  final long mb = 1024L * 1024L;
  long remainder = total % mb;
  if (remainder != 0) {
    total += mb - remainder;
  }
  return total;
}
/** Logs the "LLAP" startup banner so daemon starts are easy to spot in the log. */
private void printAsciiArt() {
  // Banner lines, reproduced exactly; assembled with a StringBuilder instead of one
  // large concatenated literal.
  final String[] bannerLines = {
      "$$\\ $$\\ $$$$$$\\ $$$$$$$\\\n",
      "$$ | $$ | $$ __$$\\ $$ __$$\\\n",
      "$$ | $$ | $$ / $$ |$$ | $$ |\n",
      "$$ | $$ | $$$$$$$$ |$$$$$$$ |\n",
      "$$ | $$ | $$ __$$ |$$ ____/\n",
      "$$ | $$ | $$ | $$ |$$ |\n",
      "$$$$$$$$\\ $$$$$$$$\\ $$ | $$ |$$ |\n",
      "\\________|\\________|\\__| \\__|\\__|\n",
      "\n"
  };
  final StringBuilder banner = new StringBuilder();
  for (String bannerLine : bannerLines) {
    banner.append(bannerLine);
  }
  LOG.info("\n\n" + banner.toString());
}
/**
 * Initializes daemon-local subsystems after the composite children are initialized:
 * marks the process as a daemon for LlapProxy, starts permanent-UDF localization (when
 * enabled), and brings up the LLAP IO layer (when enabled).
 */
@Override
public void serviceInit(Configuration conf) throws Exception {
super.serviceInit(conf);
// Mark this process as an LLAP daemon so LlapProxy wires daemon-side behavior.
LlapProxy.setDaemon(true);
// fnLocalizer is non-null only when permanent-UDF download is enabled (see constructor).
if (fnLocalizer != null) {
fnLocalizer.init();
fnLocalizer.startLocalizeAllFunctions();
}
if (isIoEnabled()) {
LlapProxy.initializeLlapIo(conf);
}
}
/**
 * Starts daemon components in dependency order: the shuffle handler first (so its bound port
 * is known), then the output-format service and all composite child services; the actual bound
 * ports are written back into the configuration, and the registry is started last so it only
 * advertises fully-initialized endpoints. Optionally starts the OTEL metrics exporter.
 */
@Override
public void serviceStart() throws Exception {
  // Start the Shuffle service before the listener - until it's a service as well.
  ShuffleHandler.initializeAndStart(shuffleHandlerConf);
  LOG.info("Setting shuffle port to: " + ShuffleHandler.get().getPort());
  this.shufflePort.set(ShuffleHandler.get().getPort());
  getConfig()
      .setInt(ConfVars.LLAP_DAEMON_YARN_SHUFFLE_PORT.varname, ShuffleHandler.get().getPort());
  LlapOutputFormatService.initializeAndStart(getConfig(), secretManager);
  super.serviceStart();

  // Setup the actual ports in the configuration.
  getConfig().setInt(ConfVars.LLAP_DAEMON_RPC_PORT.varname, server.getBindAddress().getPort());
  getConfig().setInt(ConfVars.LLAP_MANAGEMENT_RPC_PORT.varname, server.getManagementBindAddress().getPort());
  if (webServices != null) {
    getConfig().setInt(ConfVars.LLAP_DAEMON_WEB_PORT.varname, webServices.getPort());
  }
  getConfig().setInt(ConfVars.LLAP_DAEMON_OUTPUT_SERVICE_PORT.varname, LlapOutputFormatService.get().getPort());
  if (LlapUtil.isCloudDeployment(getConfig())) {
    // this invokes JWT secret provider and tries to get shared secret.
    // meant to validate shared secret as well.
    new LlapExtClientJwtHelper(getConfig());
    getConfig().setInt(ConfVars.LLAP_EXTERNAL_CLIENT_CLOUD_RPC_PORT.varname,
        server.getExternalClientsRpcServerBindAddress().getPort());
  }
  // Ensure this is set in the config so that the AM can read it.
  getConfig()
      .setIfUnset(ConfVars.LLAP_DAEMON_TASK_SCHEDULER_WAIT_QUEUE_SIZE.varname,
          ConfVars.LLAP_DAEMON_TASK_SCHEDULER_WAIT_QUEUE_SIZE
              .getDefaultValue());
  // Registry is intentionally started here (not as a child service) so it picks up the
  // actual ports written into the config above.
  this.registry.init(getConfig());
  this.registry.start();
  // Fixed typo in the log message: "ShuflePort" -> "ShufflePort".
  LOG.info(
      "LlapDaemon serviceStart complete. RPC Port={}, ManagementPort={}, ShufflePort={}, WebPort={}",
      server.getBindAddress().getPort(), server.getManagementBindAddress().getPort(),
      ShuffleHandler.get().getPort(), (webServices == null ? "" : webServices.getPort()));

  // A non-positive frequency disables the OTEL exporter.
  long otelExporterFrequency =
      HiveConf.getTimeVar(getConfig(), ConfVars.HIVE_OTEL_METRICS_FREQUENCY_SECONDS, TimeUnit.MILLISECONDS);
  if (otelExporterFrequency > 0) {
    this.otelExporter = new LLAPOTELExporter(OTELUtils.getOpenTelemetry(getConfig()), otelExporterFrequency,
        server.getBindAddress().toString());
    otelExporter.setName("LLAP OTEL Exporter");
    otelExporter.setDaemon(true);
    otelExporter.start();
    LOG.info("Started OTEL exporter with frequency {}", otelExporterFrequency);
  }
}
/**
 * Stops the daemon: unregisters from service discovery first so no new work is routed here,
 * then stops child services, the shuffle handler, daemon-owned resources (via
 * {@link #shutdown()}), and finally the output-format service.
 */
@Override // Was missing; this overrides CompositeService.serviceStop.
public void serviceStop() throws Exception {
  if (registry != null) {
    this.registry.stop();
  }
  super.serviceStop();
  ShuffleHandler.shutdown();
  shutdown();
  LlapOutputFormatService.get().stop();
  LOG.info("LlapDaemon shutdown complete");
}
/**
 * Releases daemon-owned resources: invalidates tokens, unregisters the JMX bean, stops the
 * pause monitor and metrics system, closes the IO layer and function localizer, and interrupts
 * the OTEL exporter thread. Safe to call when some components were never created (null checks).
 * Also invoked from main() on startup failure.
 */
public void shutdown() {
LOG.info("LlapDaemon shutdown invoked");
// invalidate tokens
this.llapTokenManager.close();
if (llapDaemonInfoBean != null) {
try {
MBeans.unregister(llapDaemonInfoBean);
} catch (Throwable ex) {
// Best-effort: JMX unregistration failure should not block the rest of shutdown.
LOG.info("Error unregistering the bean; ignoring", ex);
}
}
if (pauseMonitor != null) {
pauseMonitor.stop();
}
if (metrics != null) {
LlapMetricsSystem.shutdown();
}
LlapProxy.close();
if (fnLocalizer != null) {
fnLocalizer.close();
}
// The exporter is a daemon thread; interrupting it is its shutdown signal.
if (otelExporter != null) {
otelExporter.interrupt();
}
}
/**
 * Daemon entry point. Reads configuration from llap-daemon-site.xml and the YARN container
 * environment, initializes logging and LlapDaemonInfo, constructs and starts the daemon, and
 * registers a shutdown hook. On any startup failure the process exits with status -1.
 */
public static void main(String[] args) throws Exception {
Thread.setDefaultUncaughtExceptionHandler(new LlapDaemonUncaughtExceptionHandler());
LlapDaemon llapDaemon = null;
try {
// Cache settings will need to be setup in llap-daemon-site.xml - since the daemons don't read hive-site.xml
// Ideally, these properties should be part of LlapDameonConf rather than HiveConf
LlapDaemonConfiguration daemonConf = new LlapDaemonConfiguration();
// Derive the application name from the YARN container id, when running under YARN.
String containerIdStr = System.getenv(ApplicationConstants.Environment.CONTAINER_ID.name());
String appName = null;
if (containerIdStr != null && !containerIdStr.isEmpty()) {
daemonConf.set(ConfVars.LLAP_DAEMON_CONTAINER_ID.varname, containerIdStr);
appName = ConverterUtils.toContainerId(containerIdStr)
.getApplicationAttemptId().getApplicationId().toString();
} else {
daemonConf.unset(ConfVars.LLAP_DAEMON_CONTAINER_ID.varname);
// Note, we assume production LLAP always runs under YARN.
LOG.error("Cannot find " + ApplicationConstants.Environment.CONTAINER_ID.toString()
+ "; LLAP tokens may grant access to subsequent instances of the cluster with"
+ " the same name");
appName = null;
}
// Record the NodeManager address when both host and port are available in the environment.
String nmHost = System.getenv(ApplicationConstants.Environment.NM_HOST.name());
String nmPort = System.getenv(ApplicationConstants.Environment.NM_PORT.name());
if (!org.apache.commons.lang3.StringUtils.isBlank(nmHost) && !org.apache.commons.lang3.StringUtils.isBlank(nmPort)) {
String nmAddress = nmHost + ":" + nmPort;
daemonConf.set(ConfVars.LLAP_DAEMON_NM_ADDRESS.varname, nmAddress);
} else {
daemonConf.unset(ConfVars.LLAP_DAEMON_NM_ADDRESS.varname);
// Unlikely, but log the actual values in case one of the two was empty/null
LOG.warn(
"NodeManager host/port not found in environment. Values retrieved: host={}, port={}",
nmHost, nmPort);
}
// Work directories come from YARN's LOCAL_DIRS, possibly overridden by daemon config.
String workDirsString = System.getenv(ApplicationConstants.Environment.LOCAL_DIRS.name());
String localDirList = LlapUtil.getDaemonLocalDirString(daemonConf, workDirsString);
String[] localDirs = (localDirList == null || localDirList.isEmpty()) ?
new String[0] : StringUtils.getTrimmedStrings(localDirList);
int rpcPort = HiveConf.getIntVar(daemonConf, ConfVars.LLAP_DAEMON_RPC_PORT);
int externalClientCloudRpcPort = HiveConf.getIntVar(daemonConf, ConfVars.LLAP_EXTERNAL_CLIENT_CLOUD_RPC_PORT);
boolean externalClientCloudSetupEnabled = LlapUtil.isCloudDeployment(daemonConf);
int mngPort = HiveConf.getIntVar(daemonConf, ConfVars.LLAP_MANAGEMENT_RPC_PORT);
int shufflePort = HiveConf.getIntVar(daemonConf, ConfVars.LLAP_DAEMON_YARN_SHUFFLE_PORT);
int webPort = HiveConf.getIntVar(daemonConf, ConfVars.LLAP_DAEMON_WEB_PORT);
// Memory / executor sizing is centralized in LlapDaemonInfo.
LlapDaemonInfo.initialize(appName, daemonConf);
int numExecutors = LlapDaemonInfo.INSTANCE.getNumExecutors();
long executorMemoryBytes = LlapDaemonInfo.INSTANCE.getExecutorMemory();
long ioMemoryBytes = LlapDaemonInfo.INSTANCE.getCacheSize();
boolean isDirectCache = LlapDaemonInfo.INSTANCE.isDirectCache();
boolean isLlapIo = LlapDaemonInfo.INSTANCE.isLlapIo();
daemonConf.set(Constants.CLUSTER_ID_HIVE_CONF_PROP, ServiceContext.findClusterId());
LlapDaemon.initializeLogging(daemonConf);
llapDaemon =
new LlapDaemon(daemonConf, numExecutors, executorMemoryBytes, isLlapIo, isDirectCache,
ioMemoryBytes, localDirs, rpcPort, externalClientCloudSetupEnabled,
externalClientCloudRpcPort, mngPort, shufflePort, webPort, appName);
LOG.info("Adding shutdown hook for LlapDaemon");
ShutdownHookManager.addShutdownHook(new CompositeServiceShutdownHook(llapDaemon), 1);
llapDaemon.init(daemonConf);
llapDaemon.start();
LOG.info("Started LlapDaemon with PID: {}", LlapDaemonInfo.INSTANCE.getPID());
// Relying on the RPC threads to keep the service alive.
} catch (Throwable t) {
// TODO Replace this with a ExceptionHandler / ShutdownHook
LOG.error("Failed to start LLAP Daemon with exception", t);
if (llapDaemon != null) {
llapDaemon.shutdown();
}
ExitUtil.terminate(-1);
}
}
/** Delegates DAG registration to the container runner. */
@Override
public LlapDaemonProtocolProtos.RegisterDagResponseProto registerDag(
LlapDaemonProtocolProtos.RegisterDagRequestProto request)
throws IOException {
return containerRunner.registerDag(request);
}
/** Counts the submission (for tests/metrics) and delegates fragment execution to the container runner. */
@Override
public SubmitWorkResponseProto submitWork(
SubmitWorkRequestProto request) throws IOException {
numSubmissions.incrementAndGet();
return containerRunner.submitWork(request);
}
/** Delegates source-state updates (from the AM) to the container runner. */
@Override
public SourceStateUpdatedResponseProto sourceStateUpdated(
SourceStateUpdatedRequestProto request) throws IOException {
return containerRunner.sourceStateUpdated(request);
}
/** Delegates query-complete notifications to the container runner. */
@Override
public QueryCompleteResponseProto queryComplete(
QueryCompleteRequestProto request) throws IOException {
return containerRunner.queryComplete(request);
}
/** Delegates fragment termination requests to the container runner. */
@Override
public TerminateFragmentResponseProto terminateFragment(
TerminateFragmentRequestProto request) throws IOException {
return containerRunner.terminateFragment(request);
}
/** Delegates fragment update requests to the container runner. */
@Override
public UpdateFragmentResponseProto updateFragment(
UpdateFragmentRequestProto request) throws IOException {
return containerRunner.updateFragment(request);
}
/**
 * Applies a new executor/wait-queue capacity: first publishes the values to the service
 * registry so schedulers see the updated capacity, then applies it locally via the
 * container runner.
 */
@Override
public SetCapacityResponseProto setCapacity(
    SetCapacityRequestProto request) throws IOException {
  Map<String, String> updatedCapacity = new HashMap<>(2);
  updatedCapacity.put(LlapRegistryService.LLAP_DAEMON_NUM_ENABLED_EXECUTORS,
      String.valueOf(request.getExecutorNum()));
  updatedCapacity.put(LlapRegistryService.LLAP_DAEMON_TASK_SCHEDULER_ENABLED_WAIT_QUEUE_SIZE,
      String.valueOf(request.getQueueSize()));
  registry.updateRegistration(updatedCapacity.entrySet());
  return containerRunner.setCapacity(request);
}
/** @return total number of submitWork() calls received so far (test hook). */
@VisibleForTesting
public long getNumSubmissions() {
return numSubmissions.get();
}
/** @return the bound address of the daemon's RPC server. */
public InetSocketAddress getListenerAddress() {
return server.getBindAddress();
}
// LlapDaemonMXBean methods. Will be exposed via JMX
/** @return the bound RPC server port. */
@Override
public int getRpcPort() {
return server.getBindAddress().getPort();
}
/** @return the configured number of executors for this daemon. */
@Override
public int getNumExecutors() {
return numExecutors;
}
/** @return the shuffle handler's actual bound port. */
@Override
public int getShufflePort() {
return ShuffleHandler.get().getPort();
}
/** @return the daemon's work directories as a comma-separated string. */
@Override
public String getLocalDirs() {
return Joiner.on(",").skipNulls().join(localDirs);
}
/** @return per-executor status strings from the container runner. */
@Override
public Set<String> getExecutorsStatus() {
return containerRunner.getExecutorStatus();
}
/** @return number of currently active fragments. */
@Override
public int getNumActive() {
return containerRunner.getNumActive();
}
/** @return executor memory in bytes (requested memory minus Xmx headroom). */
@Override
public long getExecutorMemoryPerInstance() {
return executorMemoryPerInstance;
}
/** @return LLAP IO cache size in bytes for this daemon. */
@Override
public long getIoMemoryPerInstance() {
return ioMemoryPerInstance;
}
/** @return whether the LLAP IO layer is enabled. */
@Override
public boolean isIoEnabled() {
return llapIoEnabled;
}
/** @return usable JVM heap size in bytes, as computed by {@link #getTotalHeapSize()}. */
@Override
public long getMaxJvmMemory() {
return maxJvmMemory;
}
/**
 * A global hook that checks all subclasses of GenericUDF against the whitelist. It also injects
 * us into GenericUDFBridge-s, to check with the whitelist before instantiating a UDF.
 */
private static final class LlapGlobalUdfChecker extends SerializationUtilities.Hook {
  // Made final: set once in the constructor and shared across deserialization threads.
  private final UdfWhitelistChecker fnCheckerImpl;

  public LlapGlobalUdfChecker(UdfWhitelistChecker fnCheckerImpl) {
    this.fnCheckerImpl = fnCheckerImpl;
  }

  /**
   * Checks a class before deserialization. Returns true only for GenericUDFBridge (so the
   * post-hook runs); returns false for allowed or non-UDF types; throws for disallowed UDFs.
   */
  @Override
  public boolean preRead(Class<?> type) {
    // 1) Don't call postRead - we will have checked everything here.
    // 2) Ignore GenericUDFBridge, it's checked separately in LlapUdfBridgeChecker.
    if (GenericUDFBridge.class == type) return true; // Run post-hook.
    if (!(GenericUDF.class.isAssignableFrom(type) || UDF.class.isAssignableFrom(type))
        || fnCheckerImpl.isUdfAllowed(type)) return false;
    throw new SecurityException("UDF " + type.getCanonicalName() + " is not allowed");
  }

  /** Injects the whitelist checker into deserialized GenericUDFBridge instances. */
  @Override
  public Object postRead(Object o) {
    if (o == null) return o;
    Class<?> type = o.getClass();
    if (GenericUDFBridge.class == type) {
      ((GenericUDFBridge) o).setUdfChecker(fnCheckerImpl);
    }
    // This won't usually be called otherwise.
    preRead(type);
    return o;
  }
}
/**
 * Last-resort handler for uncaught throwables on daemon threads. Errors terminate the process;
 * OOM uses halt() (no shutdown hooks) since post-OOM behavior is undefined; plain exceptions
 * terminate normally; anything thrown during an in-progress shutdown is only logged.
 */
private static class LlapDaemonUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {
@Override
public void uncaughtException(Thread t, Throwable e) {
LOG.info("UncaughtExceptionHandler invoked");
if(ShutdownHookManager.isShutdownInProgress()) {
LOG.warn("Thread {} threw a Throwable, but we are shutting down, so ignoring this", t, e);
} else if(e instanceof Error) {
try {
LOG.error("Thread {} threw an Error. Shutting down now...", t, e);
} catch (Throwable err) {
//We don't want to not exit because of an issue with logging
}
if(e instanceof OutOfMemoryError) {
//After catching an OOM java says it is undefined behavior, so don't
//even try to clean up or we can get stuck on shutdown.
try {
System.err.println("Halting due to Out Of Memory Error...");
e.printStackTrace();
} catch (Throwable err) {
// Again we don't want to fail to exit because of logging issues.
}
// halt() skips shutdown hooks entirely - intentional after OOM.
ExitUtil.halt(-1);
} else {
ExitUtil.terminate(-1);
}
} else {
LOG.error("Thread {} threw an Exception. Shutting down now...", t, e);
ExitUtil.terminate(-1);
}
}
}
/**
 * Forwards query-failure notifications (e.g. from AMReporter) to the container runner,
 * avoiding a direct dependency on the runner at AMReporter construction time.
 */
private class QueryFailedHandlerProxy implements QueryFailedHandler {
@Override
public void queryFailed(QueryIdentifier queryIdentifier) {
containerRunner.queryFailed(queryIdentifier);
}
}
}
|
googlesamples/androidtv-sample-inputs | 38,387 | library/src/main/java/com/google/android/media/tv/companionlibrary/model/Program.java | /*
* Copyright 2015 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.media.tv.companionlibrary.model;
import android.content.ContentValues;
import android.database.Cursor;
import android.media.tv.TvContentRating;
import android.media.tv.TvContract;
import android.os.Build;
import android.support.annotation.NonNull;
import android.text.TextUtils;
import com.google.android.media.tv.companionlibrary.utils.CollectionUtils;
import com.google.android.media.tv.companionlibrary.utils.TvContractUtils;
import java.util.Arrays;
import java.util.Objects;
/** A convenience class to create and insert program information into the database. */
public final class Program implements Comparable<Program> {
/** @hide */
public static final String[] PROJECTION = getProjection();
private static final long INVALID_LONG_VALUE = -1;
private static final int INVALID_INT_VALUE = -1;
private static final int IS_RECORDING_PROHIBITED = 1;
private static final int IS_SEARCHABLE = 1;
private long mId;
private long mChannelId;
private String mTitle;
private String mEpisodeTitle;
private String mSeasonNumber;
private String mEpisodeNumber;
private long mStartTimeUtcMillis;
private long mEndTimeUtcMillis;
private String mDescription;
private String mLongDescription;
private int mVideoWidth;
private int mVideoHeight;
private String mPosterArtUri;
private String mThumbnailUri;
private String[] mBroadcastGenres;
private String[] mCanonicalGenres;
private TvContentRating[] mContentRatings;
private byte[] mInternalProviderData;
private String mAudioLanguages;
private int mRecordingProhibited;
private int mSearchable;
private String mSeasonTitle;
private Program() {
mChannelId = INVALID_LONG_VALUE;
mId = INVALID_LONG_VALUE;
mStartTimeUtcMillis = INVALID_LONG_VALUE;
mEndTimeUtcMillis = INVALID_LONG_VALUE;
mVideoWidth = INVALID_INT_VALUE;
mVideoHeight = INVALID_INT_VALUE;
mSearchable = IS_SEARCHABLE;
}
/** @return The value of {@link TvContract.Programs#_ID} for the channel. */
public long getId() {
return mId;
}
/** @return The value of {@link TvContract.Programs#COLUMN_CHANNEL_ID} for the channel. */
public long getChannelId() {
return mChannelId;
}
/** @return The value of {@link TvContract.Programs#COLUMN_TITLE} for the channel. */
public String getTitle() {
return mTitle;
}
/** @return The value of {@link TvContract.Programs#COLUMN_EPISODE_TITLE} for the channel. */
public String getEpisodeTitle() {
return mEpisodeTitle;
}
/**
* @return The value of {@link TvContract.Programs#COLUMN_SEASON_DISPLAY_NUMBER} for the
* channel.
*/
public String getSeasonNumber() {
return mSeasonNumber;
}
/**
* @return The value of {@link TvContract.Programs#COLUMN_EPISODE_DISPLAY_NUMBER} for the
* channel.
*/
public String getEpisodeNumber() {
return mEpisodeNumber;
}
/**
* @return The value of {@link TvContract.Programs#COLUMN_START_TIME_UTC_MILLIS} for the
* channel.
*/
public long getStartTimeUtcMillis() {
return mStartTimeUtcMillis;
}
/**
* @return The value of {@link TvContract.Programs#COLUMN_END_TIME_UTC_MILLIS} for the channel.
*/
public long getEndTimeUtcMillis() {
return mEndTimeUtcMillis;
}
/**
* @return The value of {@link TvContract.Programs#COLUMN_SHORT_DESCRIPTION} for the channel.
*/
public String getDescription() {
return mDescription;
}
/** @return The value of {@link TvContract.Programs#COLUMN_LONG_DESCRIPTION} for the channel. */
public String getLongDescription() {
return mLongDescription;
}
/** @return The value of {@link TvContract.Programs#COLUMN_VIDEO_WIDTH} for the channel. */
public int getVideoWidth() {
return mVideoWidth;
}
/** @return The value of {@link TvContract.Programs#COLUMN_VIDEO_HEIGHT} for the channel. */
public int getVideoHeight() {
return mVideoHeight;
}
/** @return The value of {@link TvContract.Programs#COLUMN_BROADCAST_GENRE} for the channel. */
public String[] getBroadcastGenres() {
return mBroadcastGenres;
}
/** @return The value of {@link TvContract.Programs#COLUMN_CANONICAL_GENRE} for the channel. */
public String[] getCanonicalGenres() {
return mCanonicalGenres;
}
/** @return The value of {@link TvContract.Programs#COLUMN_CONTENT_RATING} for the channel. */
public TvContentRating[] getContentRatings() {
return mContentRatings;
}
/** @return The value of {@link TvContract.Programs#COLUMN_POSTER_ART_URI} for the channel. */
public String getPosterArtUri() {
return mPosterArtUri;
}
/** @return The value of {@link TvContract.Programs#COLUMN_THUMBNAIL_URI} for the channel. */
public String getThumbnailUri() {
return mThumbnailUri;
}
/**
* @return The value of {@link TvContract.Channels#COLUMN_INTERNAL_PROVIDER_DATA} for the
* channel.
*/
public byte[] getInternalProviderDataByteArray() {
return mInternalProviderData;
}
/**
* @return The value of {@link TvContract.Programs#COLUMN_INTERNAL_PROVIDER_DATA} for the
* channel.
*/
public InternalProviderData getInternalProviderData() {
if (mInternalProviderData != null) {
try {
return new InternalProviderData(mInternalProviderData);
} catch (InternalProviderData.ParseException e) {
return null;
}
}
return null;
}
/** @return The value of {@link TvContract.Programs#COLUMN_AUDIO_LANGUAGE} for the channel. */
public String getAudioLanguages() {
return mAudioLanguages;
}
/**
* @return The value of {@link TvContract.Programs#COLUMN_RECORDING_PROHIBITED} for the channel.
*/
public boolean isRecordingProhibited() {
return mRecordingProhibited == IS_RECORDING_PROHIBITED;
}
/**
* @return The value of {@link TvContract.Programs#COLUMN_RECORDING_PROHIBITED} for the channel.
*/
public boolean isSearchable() {
return mSearchable == IS_SEARCHABLE;
}
/** @return The value of {@link TvContract.Programs#COLUMN_SEASON_TITLE} for the channel. */
public String getSeasonTitle() {
return mSeasonTitle;
}
    /**
     * Hashes a subset of the fields compared in {@link #equals(Object)}
     * (mInternalProviderData is compared in equals but not hashed here; this
     * still satisfies the hashCode contract, since equal objects agree on all
     * hashed fields).
     */
    @Override
    public int hashCode() {
        return Objects.hash(
                mChannelId,
                mStartTimeUtcMillis,
                mEndTimeUtcMillis,
                mTitle,
                mEpisodeTitle,
                mDescription,
                mLongDescription,
                mVideoWidth,
                mVideoHeight,
                mPosterArtUri,
                mThumbnailUri,
                // Arrays get content-based hashing, not identity hashing.
                Arrays.hashCode(mContentRatings),
                Arrays.hashCode(mCanonicalGenres),
                mSeasonNumber,
                mEpisodeNumber);
    }
    /**
     * Value equality over the program's content fields. Note that mId,
     * mBroadcastGenres, mAudioLanguages, mSearchable, mRecordingProhibited and
     * mSeasonTitle are not part of the comparison.
     */
    @Override
    public boolean equals(Object other) {
        if (!(other instanceof Program)) {
            return false;
        }
        Program program = (Program) other;
        return mChannelId == program.mChannelId
                && mStartTimeUtcMillis == program.mStartTimeUtcMillis
                && mEndTimeUtcMillis == program.mEndTimeUtcMillis
                && Objects.equals(mTitle, program.mTitle)
                && Objects.equals(mEpisodeTitle, program.mEpisodeTitle)
                && Objects.equals(mDescription, program.mDescription)
                && Objects.equals(mLongDescription, program.mLongDescription)
                && mVideoWidth == program.mVideoWidth
                && mVideoHeight == program.mVideoHeight
                && Objects.equals(mPosterArtUri, program.mPosterArtUri)
                && Objects.equals(mThumbnailUri, program.mThumbnailUri)
                // Array fields need element-wise comparison, not reference equality.
                && Arrays.equals(mInternalProviderData, program.mInternalProviderData)
                && Arrays.equals(mContentRatings, program.mContentRatings)
                && Arrays.equals(mCanonicalGenres, program.mCanonicalGenres)
                && Objects.equals(mSeasonNumber, program.mSeasonNumber)
                && Objects.equals(mEpisodeNumber, program.mEpisodeNumber);
    }
/**
* @param other The program you're comparing to.
* @return The chronological order of the programs.
*/
@Override
public int compareTo(@NonNull Program other) {
return Long.compare(mStartTimeUtcMillis, other.mStartTimeUtcMillis);
}
@Override
public String toString() {
return "Program{"
+ "id="
+ mId
+ ", channelId="
+ mChannelId
+ ", title="
+ mTitle
+ ", episodeTitle="
+ mEpisodeTitle
+ ", seasonNumber="
+ mSeasonNumber
+ ", episodeNumber="
+ mEpisodeNumber
+ ", startTimeUtcSec="
+ mStartTimeUtcMillis
+ ", endTimeUtcSec="
+ mEndTimeUtcMillis
+ ", videoWidth="
+ mVideoWidth
+ ", videoHeight="
+ mVideoHeight
+ ", contentRatings="
+ Arrays.toString(mContentRatings)
+ ", posterArtUri="
+ mPosterArtUri
+ ", thumbnailUri="
+ mThumbnailUri
+ ", contentRatings="
+ Arrays.toString(mContentRatings)
+ ", genres="
+ Arrays.toString(mCanonicalGenres)
+ "}";
}
private void copyFrom(Program other) {
if (this == other) {
return;
}
mId = other.mId;
mChannelId = other.mChannelId;
mTitle = other.mTitle;
mEpisodeTitle = other.mEpisodeTitle;
mSeasonNumber = other.mSeasonNumber;
mEpisodeNumber = other.mEpisodeNumber;
mStartTimeUtcMillis = other.mStartTimeUtcMillis;
mEndTimeUtcMillis = other.mEndTimeUtcMillis;
mDescription = other.mDescription;
mLongDescription = other.mLongDescription;
mVideoWidth = other.mVideoWidth;
mVideoHeight = other.mVideoHeight;
mPosterArtUri = other.mPosterArtUri;
mThumbnailUri = other.mThumbnailUri;
mBroadcastGenres = other.mBroadcastGenres;
mCanonicalGenres = other.mCanonicalGenres;
mContentRatings = other.mContentRatings;
mAudioLanguages = other.mAudioLanguages;
mRecordingProhibited = other.mRecordingProhibited;
mSearchable = other.mSearchable;
mSeasonTitle = other.mSeasonTitle;
mInternalProviderData = other.mInternalProviderData;
}
/**
* @return The fields of the Program in the ContentValues format to be easily inserted into the
* TV Input Framework database.
* @hide
*/
public ContentValues toContentValues() {
ContentValues values = new ContentValues();
if (mId != INVALID_LONG_VALUE) {
values.put(TvContract.Programs._ID, mId);
}
if (mChannelId != INVALID_LONG_VALUE) {
values.put(TvContract.Programs.COLUMN_CHANNEL_ID, mChannelId);
} else {
values.putNull(TvContract.Programs.COLUMN_CHANNEL_ID);
}
if (!TextUtils.isEmpty(mTitle)) {
values.put(TvContract.Programs.COLUMN_TITLE, mTitle);
} else {
values.putNull(TvContract.Programs.COLUMN_TITLE);
}
if (!TextUtils.isEmpty(mEpisodeTitle)) {
values.put(TvContract.Programs.COLUMN_EPISODE_TITLE, mEpisodeTitle);
} else {
values.putNull(TvContract.Programs.COLUMN_EPISODE_TITLE);
}
if (!TextUtils.isEmpty(mSeasonNumber) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
values.put(TvContract.Programs.COLUMN_SEASON_DISPLAY_NUMBER, mSeasonNumber);
} else if (!TextUtils.isEmpty(mSeasonNumber)
&& Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
values.put(TvContract.Programs.COLUMN_SEASON_NUMBER, Integer.parseInt(mSeasonNumber));
} else {
values.putNull(TvContract.Programs.COLUMN_SEASON_NUMBER);
}
if (!TextUtils.isEmpty(mEpisodeNumber) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
values.put(TvContract.Programs.COLUMN_EPISODE_DISPLAY_NUMBER, mEpisodeNumber);
} else if (!TextUtils.isEmpty(mEpisodeNumber)
&& Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
values.put(TvContract.Programs.COLUMN_EPISODE_NUMBER, Integer.parseInt(mEpisodeNumber));
} else {
values.putNull(TvContract.Programs.COLUMN_EPISODE_NUMBER);
}
if (!TextUtils.isEmpty(mDescription)) {
values.put(TvContract.Programs.COLUMN_SHORT_DESCRIPTION, mDescription);
} else {
values.putNull(TvContract.Programs.COLUMN_SHORT_DESCRIPTION);
}
if (!TextUtils.isEmpty(mDescription)) {
values.put(TvContract.Programs.COLUMN_LONG_DESCRIPTION, mLongDescription);
} else {
values.putNull(TvContract.Programs.COLUMN_LONG_DESCRIPTION);
}
if (!TextUtils.isEmpty(mPosterArtUri)) {
values.put(TvContract.Programs.COLUMN_POSTER_ART_URI, mPosterArtUri);
} else {
values.putNull(TvContract.Programs.COLUMN_POSTER_ART_URI);
}
if (!TextUtils.isEmpty(mThumbnailUri)) {
values.put(TvContract.Programs.COLUMN_THUMBNAIL_URI, mThumbnailUri);
} else {
values.putNull(TvContract.Programs.COLUMN_THUMBNAIL_URI);
}
if (!TextUtils.isEmpty(mAudioLanguages)) {
values.put(TvContract.Programs.COLUMN_AUDIO_LANGUAGE, mAudioLanguages);
} else {
values.putNull(TvContract.Programs.COLUMN_AUDIO_LANGUAGE);
}
if (mBroadcastGenres != null && mBroadcastGenres.length > 0) {
values.put(
TvContract.Programs.COLUMN_BROADCAST_GENRE,
TvContract.Programs.Genres.encode(mBroadcastGenres));
} else {
values.putNull(TvContract.Programs.COLUMN_BROADCAST_GENRE);
}
if (mCanonicalGenres != null && mCanonicalGenres.length > 0) {
values.put(
TvContract.Programs.COLUMN_CANONICAL_GENRE,
TvContract.Programs.Genres.encode(mCanonicalGenres));
} else {
values.putNull(TvContract.Programs.COLUMN_CANONICAL_GENRE);
}
if (mContentRatings != null && mContentRatings.length > 0) {
values.put(
TvContract.Programs.COLUMN_CONTENT_RATING,
TvContractUtils.contentRatingsToString(mContentRatings));
} else {
values.putNull(TvContract.Programs.COLUMN_CONTENT_RATING);
}
if (mStartTimeUtcMillis != INVALID_LONG_VALUE) {
values.put(TvContract.Programs.COLUMN_START_TIME_UTC_MILLIS, mStartTimeUtcMillis);
} else {
values.putNull(TvContract.Programs.COLUMN_START_TIME_UTC_MILLIS);
}
if (mEndTimeUtcMillis != INVALID_LONG_VALUE) {
values.put(TvContract.Programs.COLUMN_END_TIME_UTC_MILLIS, mEndTimeUtcMillis);
} else {
values.putNull(TvContract.Programs.COLUMN_END_TIME_UTC_MILLIS);
}
if (mVideoWidth != INVALID_INT_VALUE) {
values.put(TvContract.Programs.COLUMN_VIDEO_WIDTH, mVideoWidth);
} else {
values.putNull(TvContract.Programs.COLUMN_VIDEO_WIDTH);
}
if (mVideoHeight != INVALID_INT_VALUE) {
values.put(TvContract.Programs.COLUMN_VIDEO_HEIGHT, mVideoHeight);
} else {
values.putNull(TvContract.Programs.COLUMN_VIDEO_HEIGHT);
}
if (mInternalProviderData != null && mInternalProviderData.length > 0) {
values.put(TvContract.Programs.COLUMN_INTERNAL_PROVIDER_DATA, mInternalProviderData);
} else {
values.putNull(TvContract.Programs.COLUMN_INTERNAL_PROVIDER_DATA);
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
values.put(TvContract.Programs.COLUMN_SEARCHABLE, mSearchable);
}
if (!TextUtils.isEmpty(mSeasonTitle) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
values.put(TvContract.Programs.COLUMN_SEASON_TITLE, mSeasonTitle);
} else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
values.putNull(TvContract.Programs.COLUMN_SEASON_TITLE);
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
values.put(TvContract.Programs.COLUMN_RECORDING_PROHIBITED, mRecordingProhibited);
}
return values;
}
    /**
     * Creates a Program object from a cursor including the fields defined in {@link
     * TvContract.Programs}.
     *
     * <p>The column indices consumed here (via {@code ++index}) must stay in the
     * exact order produced by {@link #getProjection()}, including the
     * SDK-level-dependent columns — do not reorder either side independently.
     *
     * @param cursor A row from the TV Input Framework database.
     * @return A Program with the values taken from the cursor.
     * @hide
     */
    public static Program fromCursor(Cursor cursor) {
        Builder builder = new Builder();
        int index = 0;
        if (!cursor.isNull(index)) {
            builder.setId(cursor.getLong(index));
        }
        if (!cursor.isNull(++index)) {
            builder.setChannelId(cursor.getLong(index));
        }
        if (!cursor.isNull(++index)) {
            builder.setTitle(cursor.getString(index));
        }
        if (!cursor.isNull(++index)) {
            builder.setEpisodeTitle(cursor.getString(index));
        }
        // Season/episode numbers are strings on N+ and integers pre-N, mirroring
        // the column selection in getProjection().
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
            if (!cursor.isNull(++index)) {
                builder.setSeasonNumber(cursor.getString(index), INVALID_INT_VALUE);
            }
        } else {
            if (!cursor.isNull(++index)) {
                builder.setSeasonNumber(cursor.getInt(index));
            }
        }
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
            if (!cursor.isNull(++index)) {
                builder.setEpisodeNumber(cursor.getString(index), INVALID_INT_VALUE);
            }
        } else {
            if (!cursor.isNull(++index)) {
                builder.setEpisodeNumber(cursor.getInt(index));
            }
        }
        if (!cursor.isNull(++index)) {
            builder.setDescription(cursor.getString(index));
        }
        if (!cursor.isNull(++index)) {
            builder.setLongDescription(cursor.getString(index));
        }
        if (!cursor.isNull(++index)) {
            builder.setPosterArtUri(cursor.getString(index));
        }
        if (!cursor.isNull(++index)) {
            builder.setThumbnailUri(cursor.getString(index));
        }
        if (!cursor.isNull(++index)) {
            builder.setAudioLanguages(cursor.getString(index));
        }
        // Genres and ratings are stored flattened; decode back to arrays.
        if (!cursor.isNull(++index)) {
            builder.setBroadcastGenres(TvContract.Programs.Genres.decode(cursor.getString(index)));
        }
        if (!cursor.isNull(++index)) {
            builder.setCanonicalGenres(TvContract.Programs.Genres.decode(cursor.getString(index)));
        }
        if (!cursor.isNull(++index)) {
            builder.setContentRatings(
                    TvContractUtils.stringToContentRatings(cursor.getString(index)));
        }
        if (!cursor.isNull(++index)) {
            builder.setStartTimeUtcMillis(cursor.getLong(index));
        }
        if (!cursor.isNull(++index)) {
            builder.setEndTimeUtcMillis(cursor.getLong(index));
        }
        if (!cursor.isNull(++index)) {
            builder.setVideoWidth((int) cursor.getLong(index));
        }
        if (!cursor.isNull(++index)) {
            builder.setVideoHeight((int) cursor.getLong(index));
        }
        if (!cursor.isNull(++index)) {
            builder.setInternalProviderData(cursor.getBlob(index));
        }
        // SDK-gated trailing columns; only present when getProjection() added them.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            if (!cursor.isNull(++index)) {
                builder.setSearchable(cursor.getInt(index) == IS_SEARCHABLE);
            }
        }
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
            if (!cursor.isNull(++index)) {
                builder.setSeasonTitle(cursor.getString(index));
            }
            if (!cursor.isNull(++index)) {
                builder.setRecordingProhibited(cursor.getInt(index) == IS_RECORDING_PROHIBITED);
            }
        }
        return builder.build();
    }
private static String[] getProjection() {
String[] baseColumns =
new String[] {
TvContract.Programs._ID,
TvContract.Programs.COLUMN_CHANNEL_ID,
TvContract.Programs.COLUMN_TITLE,
TvContract.Programs.COLUMN_EPISODE_TITLE,
(Build.VERSION.SDK_INT >= Build.VERSION_CODES.N)
? TvContract.Programs.COLUMN_SEASON_DISPLAY_NUMBER
: TvContract.Programs.COLUMN_SEASON_NUMBER,
(Build.VERSION.SDK_INT >= Build.VERSION_CODES.N)
? TvContract.Programs.COLUMN_EPISODE_DISPLAY_NUMBER
: TvContract.Programs.COLUMN_EPISODE_NUMBER,
TvContract.Programs.COLUMN_SHORT_DESCRIPTION,
TvContract.Programs.COLUMN_LONG_DESCRIPTION,
TvContract.Programs.COLUMN_POSTER_ART_URI,
TvContract.Programs.COLUMN_THUMBNAIL_URI,
TvContract.Programs.COLUMN_AUDIO_LANGUAGE,
TvContract.Programs.COLUMN_BROADCAST_GENRE,
TvContract.Programs.COLUMN_CANONICAL_GENRE,
TvContract.Programs.COLUMN_CONTENT_RATING,
TvContract.Programs.COLUMN_START_TIME_UTC_MILLIS,
TvContract.Programs.COLUMN_END_TIME_UTC_MILLIS,
TvContract.Programs.COLUMN_VIDEO_WIDTH,
TvContract.Programs.COLUMN_VIDEO_HEIGHT,
TvContract.Programs.COLUMN_INTERNAL_PROVIDER_DATA
};
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
String[] marshmallowColumns = new String[] {TvContract.Programs.COLUMN_SEARCHABLE};
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
String[] nougatColumns =
new String[] {
TvContract.Programs.COLUMN_SEASON_TITLE,
TvContract.Programs.COLUMN_RECORDING_PROHIBITED
};
return CollectionUtils.concatAll(baseColumns, marshmallowColumns, nougatColumns);
} else {
return CollectionUtils.concatAll(baseColumns, marshmallowColumns);
}
}
return baseColumns;
}
/** This Builder class simplifies the creation of a {@link Program} object. */
public static final class Builder {
private final Program mProgram;
/** Creates a new Builder object. */
public Builder() {
mProgram = new Program();
}
/**
* Creates a new Builder object with values copied from another Program.
*
* @param other The Program you're copying from.
*/
public Builder(Program other) {
mProgram = new Program();
mProgram.copyFrom(other);
}
/**
* Creates a new Builder object with values from the Channel this program is playing on.
*
* @param channel The Channel that contains this Program
*/
public Builder(Channel channel) {
mProgram = new Program();
mProgram.mChannelId = channel.getId();
mProgram.mDescription = channel.getDescription();
mProgram.mInternalProviderData = channel.getInternalProviderDataByteArray();
mProgram.mThumbnailUri = channel.getChannelLogo();
mProgram.mTitle = channel.getDisplayName();
}
/**
* Sets a unique id for this program.
*
* @param programId The value of {@link TvContract.Programs#_ID} for the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
private Builder setId(long programId) {
mProgram.mId = programId;
return this;
}
/**
* Sets the ID of the {@link Channel} that contains this program.
*
* @param channelId The value of {@link TvContract.Programs#COLUMN_CHANNEL_ID for the
* program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setChannelId(long channelId) {
mProgram.mChannelId = channelId;
return this;
}
/**
* Sets the title of this program. For a series, this is the series title.
*
* @param title The value of {@link TvContract.Programs#COLUMN_TITLE} for the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setTitle(String title) {
mProgram.mTitle = title;
return this;
}
/**
* Sets the title of this particular episode for a series.
*
* @param episodeTitle The value of {@link TvContract.Programs#COLUMN_EPISODE_TITLE} for the
* program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setEpisodeTitle(String episodeTitle) {
mProgram.mEpisodeTitle = episodeTitle;
return this;
}
/**
* Sets the season number for this episode for a series.
*
* @param seasonNumber The value of {@link TvContract.Programs#COLUMN_SEASON_DISPLAY_NUMBER}
* for the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setSeasonNumber(int seasonNumber) {
mProgram.mSeasonNumber = String.valueOf(seasonNumber);
return this;
}
/**
* Sets the season number for this episode for a series.
*
* @param seasonNumber The value of {@link TvContract.Programs#COLUMN_SEASON_NUMBER} for the
* program.
* @param numericalSeasonNumber An integer value for {@link
* TvContract.Programs#COLUMN_SEASON_NUMBER} which will be used for API Level 23 and
* below.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setSeasonNumber(String seasonNumber, int numericalSeasonNumber) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
mProgram.mSeasonNumber = seasonNumber;
} else {
mProgram.mSeasonNumber = String.valueOf(numericalSeasonNumber);
}
return this;
}
/**
* Sets the episode number in a season for this episode for a series.
*
* @param episodeNumber The value of {@link
* TvContract.Programs#COLUMN_EPISODE_DISPLAY_NUMBER} for the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setEpisodeNumber(int episodeNumber) {
mProgram.mEpisodeNumber = String.valueOf(episodeNumber);
return this;
}
/**
* Sets the episode number in a season for this episode for a series.
*
* @param episodeNumber The value of {@link
* TvContract.Programs#COLUMN_EPISODE_DISPLAY_NUMBER} for the program.
* @param numericalEpisodeNumber An integer value for {@link
* TvContract.Programs#COLUMN_SEASON_NUMBER} which will be used for API Level 23 and
* below.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setEpisodeNumber(String episodeNumber, int numericalEpisodeNumber) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
mProgram.mEpisodeNumber = episodeNumber;
} else {
mProgram.mEpisodeNumber = String.valueOf(numericalEpisodeNumber);
}
return this;
}
/**
* Sets the time when the program is going to begin in milliseconds since the epoch.
*
* @param startTimeUtcMillis The value of {@link
* TvContract.Programs#COLUMN_START_TIME_UTC_MILLIS} for the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setStartTimeUtcMillis(long startTimeUtcMillis) {
mProgram.mStartTimeUtcMillis = startTimeUtcMillis;
return this;
}
/**
* Sets the time when this program is going to end in milliseconds since the epoch.
*
* @param endTimeUtcMillis The value of {@link
* TvContract.Programs#COLUMN_END_TIME_UTC_MILLIS} for the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setEndTimeUtcMillis(long endTimeUtcMillis) {
mProgram.mEndTimeUtcMillis = endTimeUtcMillis;
return this;
}
/**
* Sets a brief description of the program. For a series, this would be a brief description
* of the episode.
*
* @param description The value of {@link TvContract.Programs#COLUMN_SHORT_DESCRIPTION} for
* the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setDescription(String description) {
mProgram.mDescription = description;
return this;
}
/**
* Sets a longer description of a program if one exists.
*
* @param longDescription The value of {@link TvContract.Programs#COLUMN_LONG_DESCRIPTION}
* for the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setLongDescription(String longDescription) {
mProgram.mLongDescription = longDescription;
return this;
}
/**
* Sets the video width of the program.
*
* @param width The value of {@link TvContract.Programs#COLUMN_VIDEO_WIDTH} for the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setVideoWidth(int width) {
mProgram.mVideoWidth = width;
return this;
}
/**
* Sets the video height of the program.
*
* @param height The value of {@link TvContract.Programs#COLUMN_VIDEO_HEIGHT} for the
* program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setVideoHeight(int height) {
mProgram.mVideoHeight = height;
return this;
}
/**
* Sets the content ratings for this program.
*
* @param contentRatings An array of {@link TvContentRating} that apply to this program
* which will be flattened to a String to store in a database.
* @return This Builder object to allow for chaining of calls to builder methods.
* @see TvContract.Programs#COLUMN_CONTENT_RATING
*/
public Builder setContentRatings(TvContentRating[] contentRatings) {
mProgram.mContentRatings = contentRatings;
return this;
}
/**
* Sets the large poster art of the program.
*
* @param posterArtUri The value of {@link TvContract.Programs#COLUMN_POSTER_ART_URI} for
* the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setPosterArtUri(String posterArtUri) {
mProgram.mPosterArtUri = posterArtUri;
return this;
}
/**
* Sets a small thumbnail of the program.
*
* @param thumbnailUri The value of {@link TvContract.Programs#COLUMN_THUMBNAIL_URI} for the
* program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setThumbnailUri(String thumbnailUri) {
mProgram.mThumbnailUri = thumbnailUri;
return this;
}
/**
* Sets the broadcast-specified genres of the program.
*
* @param genres Array of genres that apply to the program based on the broadcast standard
* which will be flattened to a String to store in a database.
* @return This Builder object to allow for chaining of calls to builder methods.
* @see TvContract.Programs#COLUMN_BROADCAST_GENRE
*/
public Builder setBroadcastGenres(String[] genres) {
mProgram.mBroadcastGenres = genres;
return this;
}
/**
* Sets the genres of the program.
*
* @param genres An array of {@link TvContract.Programs.Genres} that apply to the program
* which will be flattened to a String to store in a database.
* @return This Builder object to allow for chaining of calls to builder methods.
* @see TvContract.Programs#COLUMN_CANONICAL_GENRE
*/
public Builder setCanonicalGenres(String[] genres) {
mProgram.mCanonicalGenres = genres;
return this;
}
/**
* Sets the internal provider data for the program as raw bytes.
*
* @param data The value of {@link TvContract.Programs#COLUMN_INTERNAL_PROVIDER_DATA} for
* the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setInternalProviderData(byte[] data) {
mProgram.mInternalProviderData = data;
return this;
}
/**
* Sets the internal provider data for the program.
*
* @param internalProviderData The value of {@link
* TvContract.Programs#COLUMN_INTERNAL_PROVIDER_DATA} for the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setInternalProviderData(InternalProviderData internalProviderData) {
if (internalProviderData != null) {
mProgram.mInternalProviderData = internalProviderData.toString().getBytes();
}
return this;
}
/**
* Sets the available audio languages for this program as a comma-separated String.
*
* @param audioLanguages The value of {@link TvContract.Programs#COLUMN_AUDIO_LANGUAGE} for
* the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setAudioLanguages(String audioLanguages) {
mProgram.mAudioLanguages = audioLanguages;
return this;
}
/**
* Sets whether this program cannot be recorded.
*
* @param prohibited The value of {@link TvContract.Programs#COLUMN_RECORDING_PROHIBITED}
* for the program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setRecordingProhibited(boolean prohibited) {
mProgram.mRecordingProhibited = prohibited ? IS_RECORDING_PROHIBITED : 0;
return this;
}
/**
* Sets whether this channel can be searched for in other applications.
*
* @param searchable The value of {@link TvContract.Programs#COLUMN_SEARCHABLE} for the
* program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setSearchable(boolean searchable) {
mProgram.mSearchable = searchable ? IS_SEARCHABLE : 0;
return this;
}
/**
* Sets a custom name for the season, if applicable.
*
* @param seasonTitle The value of {@link TvContract.Programs#COLUMN_SEASON_TITLE} for the
* program.
* @return This Builder object to allow for chaining of calls to builder methods.
*/
public Builder setSeasonTitle(String seasonTitle) {
mProgram.mSeasonTitle = seasonTitle;
return this;
}
/** @return A new Program with values supplied by the Builder. */
public Program build() {
Program program = new Program();
program.copyFrom(mProgram);
if (mProgram.getStartTimeUtcMillis() >= mProgram.getEndTimeUtcMillis()) {
throw new IllegalArgumentException(
"This program must have defined start and end " + "times");
}
return program;
}
}
}
|
apache/inlong | 38,290 | inlong-common/src/main/java/org/apache/inlong/common/msg/InLongMsg.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.inlong.common.msg;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import org.xerial.snappy.Snappy;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
// Container/codec for InLong batch messages. In "add mode" messages are
// accumulated per-attribute and serialized via build(); in "parse mode"
// (created through parseFrom) a serialized buffer is decoded back into
// per-attribute raw data. The wire format is framed by a 2-byte MAGIC
// marker at both ends whose value encodes the format version (v0..v4).
public class InLongMsg {
    // Default serialization capacity (bytes) used by the convenience factories.
    private static final int DEFAULT_CAPACITY = 4096;
    // Soft size limit consulted by checkLen()/isfull(); not a hard cap.
    private final int capacity;
    // Compression type codes carried in the high 3 bits of the msg-type byte.
    private static final int BIN_MSG_NO_ZIP = 0;
    private static final int BIN_MSG_SNAPPY_TYPE = 1;
    // Byte offsets of the fixed header fields of a v4 (binary) message.
    private static final int BIN_MSG_TOTALLEN_OFFSET = 0;
    private static final int BIN_MSG_GROUPID_OFFSET = 5;
    private static final int BIN_MSG_STREAMID_OFFSET = 7;
    private static final int BIN_MSG_EXTFIELD_OFFSET = 9;
    private static final int BIN_MSG_COUNT_OFFSET = 15;
    private static final int BIN_MSG_DATATIME_OFFSET = 11;
    private static final int BIN_MSG_TOTALLEN_SIZE = 4;
    private static final int BIN_MSG_MSGTYPE_OFFSET = 4;
    // Mask OR-ed into the msg-type byte when the body is Snappy compressed.
    private static final int BIN_MSG_SET_SNAPPY = (1 << 5);
    private static final int BIN_MSG_BODYLEN_SIZE = 4;
    private static final int BIN_MSG_BODYLEN_OFFSET = 21;
    private static final int BIN_MSG_BODY_OFFSET =
            BIN_MSG_BODYLEN_SIZE + BIN_MSG_BODYLEN_OFFSET;
    private static final int BIN_MSG_ATTRLEN_SIZE = 2;
    // Total size of all fixed framing fields of a v4 message.
    private static final int BIN_MSG_FORMAT_SIZE = 29;
    private static final int BIN_MSG_MAGIC_SIZE = 2;
    // Trailing magic word that terminates a well-formed v4 message.
    private static final int BIN_MSG_MAGIC = 0xEE01;
    // Two-byte MAGIC markers; the second byte identifies the format version.
    private static final byte[] MAGIC0 = {(byte) 0xf, (byte) 0x0};
    // with timestamp
    private static final byte[] MAGIC1 = {(byte) 0xf, (byte) 0x1};
    // with msg cnt 20130619
    private static final byte[] MAGIC2 = {(byte) 0xf, (byte) 0x2};
    // support msg_type = 6
    private static final byte[] MAGIC3 = {(byte) 0xf, (byte) 0x3};
    // support binmsg
    private static final byte[] MAGIC4 = {(byte) 0xf, (byte) 0x4};
    // true when created for building (add mode); false when created by parseFrom.
    private final boolean addmode;
    // Joiner/splitter pair for "k=v<sep>k=v" attribute strings.
    private static final Joiner.MapJoiner MAP_JOINER =
            Joiner.on(AttributeConstants.SEPARATOR)
                    .withKeyValueSeparator(AttributeConstants.KEY_VALUE_SEPARATOR);
    private static final Splitter.MapSplitter MAP_SPLITTER =
            Splitter.on(AttributeConstants.SEPARATOR)
                    .trimResults().withKeyValueSeparator(AttributeConstants.KEY_VALUE_SEPARATOR);
    // Per-attribute accumulation buffer used in add mode (non-v4 formats).
    static class DataBuffer {
        DataOutputBuffer out;
        // number of messages written into this buffer
        int cnt;
        public DataBuffer() {
            out = new DataOutputBuffer();
        }
        // Appends one length-prefixed record: [int len][len bytes].
        public void write(byte[] array, int position, int len)
                throws IOException {
            cnt++;
            out.writeInt(len);
            out.write(array, position, len);
        }
    }
    // ---- add-mode state ----
    private LinkedHashMap<String, DataBuffer> attr2MsgBuffer;
    // v4 only: the single raw binary message supplied via addMsg(ByteBuffer).
    private ByteBuffer binMsgBuffer;
    // Running estimate of the serialized size, compared against capacity.
    private int datalen = 0;
    private int msgcnt = 0;
    private boolean compress;
    private boolean isNumGroupId = false;
    // v3 only: whether addMsg validates record framing (see checkData).
    private boolean ischeck = true;
    private boolean isSupportLF = false;
    private final Version version;
    // Offset added to System.currentTimeMillis() when stamping create time.
    private long timeoffset = 0;
    // Numeric wire-format version of this instance.
    public int getVersion() {
        return version.intValue();
    }
    public void setTimeoffset(long offset) {
        this.timeoffset = offset;
    }
private enum Version {
vn(-1), v0(0), v1(1),
v2(2), v3(3), v4(4);
private static final Map<Integer, Version> INT_TO_TYPE_MAP =
new HashMap<Integer, Version>();
static {
for (Version type : Version.values()) {
INT_TO_TYPE_MAP.put(type.value, type);
}
}
private final int value;
private Version(int value) {
this.value = value;
}
public int intValue() {
return value;
}
public static Version of(int v) {
if (!INT_TO_TYPE_MAP.containsKey(v)) {
return vn;
}
return INT_TO_TYPE_MAP.get(v);
}
}
    /**
     * Creates a compressing InLongMsg with capacity 4096 and version 1.
     *
     * @return a new InLongMsg in add mode
     */
    public static InLongMsg newInLongMsg() {
        return newInLongMsg(true);
    }
    /**
     * Creates an InLongMsg with capacity 4096 and version 1.
     *
     * @param compress whether to Snappy-compress payloads on build
     * @return InLongMsg
     */
    public static InLongMsg newInLongMsg(boolean compress) {
        return newInLongMsg(DEFAULT_CAPACITY, compress);
    }
    /**
     * Creates a compressing InLongMsg with capacity 4096.
     *
     * @param v version number (see Version); unknown values map to vn
     * @return InLongMsg
     */
    public static InLongMsg newInLongMsg(int v) {
        return newInLongMsg(DEFAULT_CAPACITY, true, v);
    }
    /**
     * Creates an InLongMsg with capacity 4096.
     *
     * @param compress whether to Snappy-compress payloads on build
     * @param v version number
     * @return InLongMsg
     */
    public static InLongMsg newInLongMsg(boolean compress, int v) {
        return newInLongMsg(DEFAULT_CAPACITY, compress, v);
    }
    /**
     * Creates an InLongMsg with version 1.
     *
     * @param capacity soft size limit in bytes
     * @param compress whether to Snappy-compress payloads on build
     * @return InLongMsg
     */
    public static InLongMsg newInLongMsg(int capacity, boolean compress) {
        return new InLongMsg(capacity, compress, Version.v1);
    }
    /**
     * Creates an InLongMsg. An unrecognized version yields Version.vn, which
     * later fails inside writeMagic() at build time.
     *
     * @param capacity soft size limit in bytes
     * @param compress whether to Snappy-compress payloads on build
     * @param v version number
     * @return InLongMsg
     */
    public static InLongMsg newInLongMsg(int capacity, boolean compress, int v) {
        return new InLongMsg(capacity, compress, Version.of(v));
    }
    // for create: add-mode constructor; parse mode uses InLongMsg(ByteBuffer, Version).
    private InLongMsg(int capacity, boolean compress, Version v) {
        version = v;
        addmode = true;
        this.compress = compress;
        this.capacity = capacity;
        attr2MsgBuffer = new LinkedHashMap<String, DataBuffer>();
        parsedInput = null;
        reset();
    }
    /**
     * return false means current msg is big enough, no other data should be
     * added again, but attention: the input data has already been added, and if
     * you add another data after return false it can also be added
     * successfully.
     *
     * @param attr attribute info
     * @param data binary data
     * @param offset data start offset
     * @param len data length
     * @return false when the accumulated size has reached capacity
     */
    public boolean addMsg(String attr, byte[] data, int offset, int len) {
        return addMsg(attr, ByteBuffer.wrap(data, offset, len));
    }
    /**
     * Adds one message under the given attribute string.
     *
     * @param attr attribute string
     * @param data message payload; assumed array-backed (array() is used) -
     *             a direct buffer would throw; TODO confirm callers
     * @return false when full, or when v3 framing validation fails
     */
    public boolean addMsg(String attr, ByteBuffer data) {
        checkMode(true);
        // v3 payloads are pre-framed and must pass structural validation
        if ((version.intValue() == Version.v3.intValue())
                && !checkData(data)) {
            return false;
        }
        DataBuffer outputBuffer = attr2MsgBuffer.computeIfAbsent(attr, k -> new DataBuffer());
        // attrlen + utflen + meglen + compress
        this.datalen += attr.length() + 2 + 4 + 1;
        int len = data.remaining();
        try {
            outputBuffer.write(data.array(), data.position(), len);
            this.datalen += len + 4;
            if (version.intValue() == Version.v2.intValue()) {
                // v2 additionally serializes a per-attribute count
                this.datalen += 4;
            }
        } catch (IOException e) {
            // NOTE(review): datalen was already bumped for the attribute
            // overhead before the failed write, so the size estimate can
            // drift slightly on failure; best-effort behavior kept as-is.
            e.printStackTrace();
            return false;
        }
        msgcnt++;
        return checkLen(attr, len);
    }
    public boolean addMsg(String attr, byte[] data) {
        return addMsg(attr, ByteBuffer.wrap(data));
    }
    public boolean addMsg(byte[] data) {
        return addMsg(ByteBuffer.wrap(data));
    }
    // v4 only: accepts exactly one pre-framed binary message per instance.
    public boolean addMsg(ByteBuffer data) {
        if (!checkBinData(data)) {
            return false;
        }
        // only a single binary message may be attached
        if (binMsgBuffer != null) {
            return false;
        }
        binMsgBuffer = ByteBuffer.allocate(data.remaining());
        binMsgBuffer.put(data);
        // rewind to the start of the copied message
        binMsgBuffer.position(BIN_MSG_TOTALLEN_OFFSET);
        msgcnt = getBinMsgCnt(binMsgBuffer);
        return true;
    }
    // ---- absolute-offset readers for the v4 binary header ----
    private int getBinMsgtype(ByteBuffer data) {
        return data.get(BIN_MSG_MSGTYPE_OFFSET);
    }
    private int getBinMsgCnt(ByteBuffer data) {
        return data.getShort(BIN_MSG_COUNT_OFFSET);
    }
    // Header stores seconds; converted to milliseconds here.
    private long getBinCreatetime(ByteBuffer data) {
        return data.getInt(BIN_MSG_DATATIME_OFFSET) * 1000L;
    }
    // ext-field bit 0x4 cleared means numeric group/stream ids are in use.
    private boolean getBinNumFlag(ByteBuffer data) {
        return (data.getShort(BIN_MSG_EXTFIELD_OFFSET) & 0x4) == 0;
    }
    // ext-field bit 0x20 set means line-feed separation is supported.
    private boolean getBinisSupportLF(ByteBuffer data) {
        return (data.getShort(BIN_MSG_EXTFIELD_OFFSET) & 0x20) == 0x20;
    }
private boolean checkBinData(ByteBuffer data) {
int totalLen = data.getInt(BIN_MSG_TOTALLEN_OFFSET);
int bodyLen = data.getInt(BIN_MSG_BODYLEN_OFFSET);
int attrLen = data.getShort(BIN_MSG_BODY_OFFSET + bodyLen);
int msgMagic = (data.getShort(BIN_MSG_BODY_OFFSET + bodyLen
+ BIN_MSG_ATTRLEN_SIZE + attrLen) & 0xFFFF);
if ((totalLen + BIN_MSG_TOTALLEN_SIZE != (bodyLen + attrLen + BIN_MSG_FORMAT_SIZE))
|| (msgMagic != BIN_MSG_MAGIC)) {
return false;
}
return true;
}
    // Adds a batch of already length-framed records under one attribute; v3
    // framing validation is suspended because the input was produced by this
    // codec (see getIteratorBuffer).
    public boolean addMsgs(String attr, ByteBuffer data) {
        boolean res = true;
        Iterator<ByteBuffer> it = getIteratorBuffer(data);
        setCheckMode(false);
        while (it.hasNext()) {
            res = this.addMsg(attr, it.next());
        }
        setCheckMode(true);
        // NOTE: reflects only the LAST addMsg result, not all of them
        return res;
    }
    // Toggles v3 record validation; no-op for other versions.
    private void setCheckMode(boolean mode) {
        if (version.intValue() == Version.v3.intValue()) {
            ischeck = mode;
        }
    }
// Version 3 message, need check data content
private boolean checkData(ByteBuffer data) {
if ((version.intValue() == Version.v3.intValue()) && !ischeck) {
return true;
}
// check data
data.mark();
int msgnum = 0;
while (data.remaining() > 0) {
int datalen = data.getInt();
if (datalen > data.remaining()) {
return false;
}
msgnum++;
byte[] record = new byte[datalen];
data.get(record, 0, datalen);
}
msgnum = msgnum / 2;
if (msgnum > 1) {
msgcnt += msgnum - 1;
}
data.reset();
return true;
}
private boolean checkLen(String attr, int len) {
return datalen < capacity;
}
public boolean isfull() {
checkMode(true);
if (datalen >= capacity) {
return true;
}
return false;
}
    // Serializes v0-v3 content: MAGIC + header + attr-count, then one
    // [UTF attr][cnt (v2)][int len][compress flag][payload] section per
    // attribute, terminated by a trailing MAGIC.
    private ByteBuffer defaultBuild(long createtime) {
        try {
            this.createtime = createtime;
            DataOutputBuffer out = new DataOutputBuffer(capacity);
            writeHeader(out);
            out.writeInt(attr2MsgBuffer.size());
            if (compress) {
                for (Map.Entry<String, DataBuffer> entry : attr2MsgBuffer.entrySet()) {
                    String attr = entry.getKey();
                    DataBuffer data = entry.getValue();
                    out.writeUTF(attr);
                    if (version.intValue() == Version.v2.intValue()) {
                        out.writeInt(data.cnt);
                    }
                    // Snappy needs a worst-case sized scratch buffer
                    int guessLen =
                            Snappy.maxCompressedLength(data.out.getLength());
                    byte[] tmpData = new byte[guessLen];
                    int len = Snappy.compress(data.out.getData(), 0,
                            data.out.getLength(), tmpData, 0);
                    // +1 accounts for the compress-flag byte written next
                    out.writeInt(len + 1);
                    out.writeBoolean(compress);
                    out.write(tmpData, 0, len);
                }
            } else {
                for (Map.Entry<String, DataBuffer> entry : attr2MsgBuffer.entrySet()) {
                    String attr = entry.getKey();
                    DataBuffer data = entry.getValue();
                    out.writeUTF(attr);
                    if (version.intValue() == Version.v2.intValue()) {
                        out.writeInt(data.cnt);
                    }
                    // +1 accounts for the compress-flag byte written next
                    out.writeInt(data.out.getLength() + 1);
                    out.writeBoolean(compress);
                    out.write(data.out.getData(), 0, data.out.getLength());
                }
            }
            writeMagic(out);
            out.close();
            return ByteBuffer.wrap(out.getData(), 0, out.getLength());
        } catch (IOException e) {
            // best effort: serialization failure yields null
            e.printStackTrace();
            return null;
        }
    }
    // Serializes v4 content: wraps the attached binary message in MAGIC4
    // markers, Snappy-compressing the body first when requested and the
    // message is not already compressed.
    private ByteBuffer binBuild(long createtime) {
        try {
            this.createtime = createtime;
            DataOutputBuffer out = new DataOutputBuffer(capacity);
            writeMagic(out);
            int msgType = getBinMsgtype(binMsgBuffer);
            // high 3 bits of the msg-type byte carry the compression code
            int compressType = ((msgType & 0xE0) >> 5);
            if ((compressType == 0) && (compress)) {
                binMsgBuffer.position(BIN_MSG_BODYLEN_OFFSET);
                // copy body data
                int bodyLen = binMsgBuffer.getInt();
                byte[] body = new byte[bodyLen];
                binMsgBuffer.get(body, 0, bodyLen);
                // copy attributes
                int attrLen =
                        binMsgBuffer.getShort(BIN_MSG_BODY_OFFSET + bodyLen);
                byte[] attr =
                        new byte[BIN_MSG_ATTRLEN_SIZE + attrLen + BIN_MSG_MAGIC_SIZE];
                binMsgBuffer.get(attr, 0, attr.length);
                int guessLen = Snappy.maxCompressedLength(bodyLen);
                byte[] tmpData = new byte[guessLen];
                int realLen = Snappy.compress(body, 0,
                        body.length, tmpData, 0);
                int totalDataLen = binMsgBuffer.getInt(BIN_MSG_TOTALLEN_OFFSET);
                ByteBuffer dataBuf = ByteBuffer.allocate(
                        totalDataLen + BIN_MSG_TOTALLEN_SIZE - body.length + realLen);
                // copy headers
                dataBuf.put(binMsgBuffer.array(), 0, BIN_MSG_BODYLEN_OFFSET);
                // set compress flag
                dataBuf.put(BIN_MSG_MSGTYPE_OFFSET, (byte) (msgType | BIN_MSG_SET_SNAPPY));
                dataBuf.putInt(BIN_MSG_TOTALLEN_OFFSET,
                        realLen + attrLen + BIN_MSG_FORMAT_SIZE - 4);
                // set data length
                dataBuf.putInt(BIN_MSG_BODYLEN_OFFSET, realLen);
                // fill compressed data
                System.arraycopy(tmpData, 0,
                        dataBuf.array(), BIN_MSG_BODY_OFFSET, realLen);
                // fill attributes and MAGIC
                System.arraycopy(attr, 0, dataBuf.array(),
                        BIN_MSG_BODY_OFFSET + realLen, attr.length);
                out.write(dataBuf.array(), 0, dataBuf.capacity());
            } else {
                // already compressed (or compression disabled): pass through
                out.write(binMsgBuffer.array(), 0, binMsgBuffer.capacity());
            }
            writeMagic(out);
            out.close();
            return ByteBuffer.wrap(out.getData(), 0, out.getLength());
        } catch (IOException e) {
            // best effort: serialization failure yields null
            e.printStackTrace();
            return null;
        }
    }
public ByteBuffer build() {
return build(System.currentTimeMillis() + timeoffset);
}
public ByteBuffer build(long createtime) {
checkMode(true);
if (version.intValue() != Version.v4.intValue()) {
return defaultBuild(createtime);
} else {
return binBuild(createtime);
}
}
    // Writes MAGIC plus the version-dependent header fields: createtime for
    // v1 and later, message count for v2 and later. v4 has no extra header.
    private void writeHeader(DataOutputBuffer out) throws IOException {
        writeMagic(out);
        if (version.intValue() == Version.v4.intValue()) {
            return;
        }
        if (version.intValue() >= Version.v1.intValue()) {
            // createtime = System.currentTimeMillis() + timeoffset;
            out.writeLong(createtime);
        }
        if (version.intValue() >= Version.v2.intValue()) {
            out.writeInt(this.getMsgCnt());
        }
    }
private void writeMagic(DataOutputBuffer out) throws IOException {
if (version == Version.v1) {
out.write(MAGIC1[0]);
out.write(MAGIC1[1]);
} else if (version == Version.v2) {
out.write(MAGIC2[0]);
out.write(MAGIC2[1]);
} else if (version == Version.v3) {
out.write(MAGIC3[0]);
out.write(MAGIC3[1]);
} else if (version == Version.v4) {
out.write(MAGIC4[0]);
out.write(MAGIC4[1]);
} else {
throw new IOException("wrong version : " + version.intValue());
}
}
    public byte[] buildArray() {
        return buildArray(System.currentTimeMillis() + timeoffset);
    }
    // Array variant of build(); returns null when serialization failed.
    public byte[] buildArray(long createtime) {
        ByteBuffer buffer = this.build(createtime);
        if (buffer == null) {
            return null;
        }
        byte[] res = new byte[buffer.remaining()];
        System.arraycopy(buffer.array(), buffer.position(), res, 0, res.length);
        return res;
    }
    // Clears accumulated messages and restarts the size estimate at the
    // serialized header length.
    public void reset() {
        checkMode(true);
        this.attr2MsgBuffer.clear();
        this.datalen = getHeaderLen();
        msgcnt = 0;
    }
    // Estimated serialized header size for the current version.
    private int getHeaderLen() {
        int len = 4; // magic
        if (version.intValue() >= Version.v1.intValue()) {
            len += 8; // create time
        }
        // NOTE(review): writeHeader() emits the count for v2 AND v3 (>=),
        // but only v2 is accounted for here - presumably a small, harmless
        // under-estimate for v3; confirm before relying on exact sizes.
        if (version.intValue() == Version.v2.intValue()) {
            len += 4; // msgcnt
        }
        return len + 4; // attrcnt
    }
    // for both mode
    public int getMsgCnt() {
        return msgcnt;
    }
    // Per-attribute message count; throws NPE if the attribute is unknown.
    public int getMsgCnt(String attr) {
        if (addmode) {
            return this.attr2MsgBuffer.get(attr).cnt;
        } else {
            return this.attr2Rawdata.get(attr).cnt;
        }
    }
    // Guards add-mode-only vs parse-mode-only operations.
    private void checkMode(boolean add) {
        if (addmode != add) {
            throw new RuntimeException(
                    addmode ? "illegal operation in add mode !!!"
                            : "illegal operation in parse mode !!!");
        }
    }
    // ---- parse-mode state ----
    // Number of attribute sections read from the header; -1 until parsed.
    private int attrcnt = -1;
    // private LinkedHashMap<String, ByteBuffer> attr2Rawdata = null;
    // Raw (possibly re-assembled) payload for one attribute in parse mode.
    static class DataByteBuffer {
        final int cnt;
        ByteBuffer buffer;
        // staging buffer used while splicing records; see syncByteBuffer()
        DataOutputBuffer inoutBuffer;
        public DataByteBuffer(int cnt, ByteBuffer buffer) {
            this.cnt = cnt;
            this.buffer = buffer;
        }
        public DataByteBuffer(int cnt, DataOutputBuffer inoutbuffer) {
            this.cnt = cnt;
            this.inoutBuffer = inoutbuffer;
        }
        // Exposes the staged bytes through the public buffer field.
        public void syncByteBuffer() {
            this.buffer = ByteBuffer.wrap(inoutBuffer.getData(), 0, inoutBuffer.getLength());
        }
    }
    private LinkedHashMap<String, DataByteBuffer> attr2Rawdata = null;
    // not used right now
    // private LinkedHashMap<String, Integer> attr2index = null;
    private long createtime = -1;
    // true once parse() has decoded the body
    private boolean parsed = false;
    private DataInputBuffer parsedInput;
    private ByteBuffer parsedBinInput;
    // for parsed: reads the header eagerly, defers body decoding to parse().
    private InLongMsg(ByteBuffer buffer, Version magic) throws IOException {
        version = magic;
        addmode = false;
        capacity = 0;
        if (version.intValue() != Version.v4.intValue()) {
            parsedInput = new DataInputBuffer();
            // skip the 2-byte leading MAGIC
            parsedInput.reset(buffer.array(), buffer.position() + 2,
                    buffer.remaining());
            if (version.intValue() >= Version.v1.intValue()) {
                createtime = parsedInput.readLong();
            }
            if (version.intValue() >= Version.v2.intValue()) {
                this.msgcnt = parsedInput.readInt();
            }
            attrcnt = parsedInput.readInt();
        } else {
            // v4: copy everything after the leading MAGIC into its own array
            byte[] binMsg = new byte[buffer.remaining() - 2];
            System.arraycopy(buffer.array(),
                    buffer.position() + 2, binMsg, 0, binMsg.length);
            parsedBinInput = ByteBuffer.wrap(binMsg);
            this.createtime = getBinCreatetime(parsedBinInput);
            this.msgcnt = getBinMsgCnt(parsedBinInput);
            this.isNumGroupId = getBinNumFlag(parsedBinInput);
            this.isSupportLF = getBinisSupportLF(parsedBinInput);
        }
    }
    // v0-v2 body: one [UTF attr][cnt (v2)][int len][payload] section each.
    private void parseDefault() throws IOException {
        attr2Rawdata = new LinkedHashMap<String, DataByteBuffer>(
                attrcnt * 10 / 7);
        for (int i = 0; i < attrcnt; i++) {
            String attr = parsedInput.readUTF();
            int cnt = 0;
            if (version.intValue() == Version.v2.intValue()) {
                cnt = parsedInput.readInt();
            }
            int len = parsedInput.readInt();
            int pos = parsedInput.getPosition();
            // zero-copy view over the underlying parse buffer
            attr2Rawdata.put(
                    attr,
                    new DataByteBuffer(cnt, ByteBuffer.wrap(
                            parsedInput.getData(), pos, len)));
            parsedInput.skip(len);
        }
    }
    // v3 body: each attribute section carries interleaved per-record private
    // attributes; records are regrouped under "commonAttr&privateAttr" keys.
    // NOTE(review): the early `return`s on malformed framing skip the final
    // syncByteBuffer() loop, leaving partially-built entries with a null
    // buffer field - presumably acceptable for corrupt input; confirm.
    private void parseMixAttr() throws IOException {
        attr2Rawdata = new LinkedHashMap<String, DataByteBuffer>(
                this.msgcnt * 10 / 7);
        for (int i = 0; i < attrcnt; i++) {
            ByteBuffer bodyBuffer;
            String commonAttr = parsedInput.readUTF();
            int len = parsedInput.readInt();
            int compress = parsedInput.readByte();
            int pos = parsedInput.getPosition();
            if (compress == 1) {
                // len includes the compress-flag byte already consumed
                byte[] uncompressdata = new byte[Snappy.uncompressedLength(
                        parsedInput.getData(), pos, len - 1)];
                int msgLen = Snappy.uncompress(parsedInput.getData(), pos, len - 1,
                        uncompressdata, 0);
                bodyBuffer = ByteBuffer.wrap(uncompressdata, 0, msgLen);
            } else {
                bodyBuffer = ByteBuffer.wrap(parsedInput.getData(), pos, len - 1);
            }
            parsedInput.skip(len - 1);
            while (bodyBuffer.remaining() > 0) {
                // total message length = (data length + attributes length) * N
                int singleTotalLen = bodyBuffer.getInt();
                if (singleTotalLen > bodyBuffer.remaining()) {
                    return;
                }
                while (singleTotalLen > 0) {
                    // single data length
                    int msgItemLen = bodyBuffer.getInt();
                    if (msgItemLen <= 0 || msgItemLen > singleTotalLen) {
                        return;
                    }
                    // re-frame as [compress=0][int len][payload]
                    byte[] record = new byte[1 + 4 + msgItemLen];
                    record[0] = 0;
                    record[1] = (byte) ((msgItemLen >> 24) & 0xFF);
                    record[2] = (byte) ((msgItemLen >> 16) & 0xFF);
                    record[3] = (byte) ((msgItemLen >> 8) & 0xFF);
                    record[4] = (byte) (msgItemLen & 0xFF);
                    bodyBuffer.get(record, 1 + 4, msgItemLen);
                    // single attribute length
                    int singleAttrLen = bodyBuffer.getInt();
                    if (singleAttrLen <= 0 || singleAttrLen > singleTotalLen) {
                        return;
                    }
                    byte[] attrBuf = new byte[singleAttrLen];
                    bodyBuffer.get(attrBuf, 0, singleAttrLen);
                    String finalAttr = commonAttr + "&" + new String(attrBuf);
                    DataByteBuffer inputBuffer = attr2Rawdata.get(finalAttr);
                    if (inputBuffer == null) {
                        // first record for this key keeps the compress flag byte
                        inputBuffer = new DataByteBuffer(0,
                                new DataOutputBuffer(msgItemLen + 4 + 1));
                        attr2Rawdata.put(finalAttr, inputBuffer);
                        inputBuffer.inoutBuffer.write(record, 0, msgItemLen + 4 + 1);
                    } else {
                        // subsequent records skip the leading flag byte
                        inputBuffer.inoutBuffer.write(record, 1, msgItemLen + 4);
                    }
                    // 8 = two int length prefixes consumed above
                    singleTotalLen = singleTotalLen - msgItemLen - singleAttrLen - 8;
                }
            }
        }
        // sync data
        for (String attr : attr2Rawdata.keySet()) {
            DataByteBuffer data = attr2Rawdata.get(attr);
            data.syncByteBuffer();
        }
    }
    // v4 body: decodes the binary header, optionally uncompresses the body,
    // and regroups records by their joined (private + common) attribute map.
    // NOTE(review): as in parseMixAttr, the early `return`s skip the final
    // syncByteBuffer() loop for malformed input.
    private void parseBinMsg() throws IOException {
        Map<String, String> commonAttrMap = new HashMap<String, String>();
        int totalLen = parsedBinInput.getInt(BIN_MSG_TOTALLEN_OFFSET);
        final int msgtype = parsedBinInput.get(BIN_MSG_MSGTYPE_OFFSET);
        int groupIdNum = parsedBinInput.getShort(BIN_MSG_GROUPID_OFFSET);
        int streamIdNum = parsedBinInput.getShort(BIN_MSG_STREAMID_OFFSET);
        int bodyLen = parsedBinInput.getInt(BIN_MSG_BODYLEN_OFFSET);
        long dataTime = parsedBinInput.getInt(BIN_MSG_DATATIME_OFFSET);
        final int extField = parsedBinInput.getShort(BIN_MSG_EXTFIELD_OFFSET);
        int attrLen = parsedBinInput.getShort(BIN_MSG_BODY_OFFSET + bodyLen);
        int msgMagic = (parsedBinInput.getShort(BIN_MSG_BODY_OFFSET
                + bodyLen + BIN_MSG_ATTRLEN_SIZE + attrLen) & 0xFFFF);
        // header stores seconds; convert to milliseconds
        dataTime = dataTime * 1000;
        // read common attributes
        if (attrLen != 0) {
            byte[] attr = new byte[attrLen];
            parsedBinInput.position(BIN_MSG_BODY_OFFSET + bodyLen + BIN_MSG_ATTRLEN_SIZE);
            parsedBinInput.get(attr);
            String strAttr = new String(attr);
            commonAttrMap = new HashMap<String, String>(MAP_SPLITTER.split(strAttr));
        }
        commonAttrMap.put(AttributeConstants.DATA_TIME, String.valueOf(dataTime));
        // unzip data; a leading 0 flag byte is prepended either way
        ByteBuffer bodyBuffer;
        byte[] body = new byte[bodyLen + 1];
        parsedBinInput.position(BIN_MSG_BODY_OFFSET);
        parsedBinInput.get(body, 1, bodyLen);
        int zipType = (msgtype & 0xE0) >> 5;
        switch (zipType) {
            case (BIN_MSG_SNAPPY_TYPE):
                byte[] uncompressdata =
                        new byte[Snappy.uncompressedLength(body, 1, body.length - 1) + 1];
                // uncompress flag
                uncompressdata[0] = 0;
                int msgLen = Snappy.uncompress(body, 1, body.length - 1,
                        uncompressdata, 1);
                bodyBuffer = ByteBuffer.wrap(uncompressdata, 0, msgLen + 1);
                break;
            case (BIN_MSG_NO_ZIP):
            default:
                // set uncompress flag
                body[0] = 0;
                bodyBuffer = ByteBuffer.wrap(body, 0, body.length);
                break;
        }
        // number groupId/streamId
        boolean isUseNumGroupId = ((extField & 0x4) == 0x0);
        if (isUseNumGroupId) {
            commonAttrMap.put(AttributeConstants.GROUP_ID, String.valueOf(groupIdNum));
            commonAttrMap.put(AttributeConstants.STREAM_ID, String.valueOf(streamIdNum));
        }
        boolean hasOtherAttr = ((extField & 0x1) == 0x1);
        commonAttrMap.put(AttributeConstants.MESSAGE_COUNT, String.valueOf(this.msgcnt));
        // with private attributes,
        // need to splice private attributes + public attributes
        if (!hasOtherAttr) {
            // general attributes and data map
            attr2Rawdata = new LinkedHashMap<String, DataByteBuffer>();
            attr2Rawdata.put(MAP_JOINER.join(commonAttrMap),
                    new DataByteBuffer(0, bodyBuffer));
        } else {
            attr2Rawdata = new LinkedHashMap<String, DataByteBuffer>(
                    this.msgcnt * 10 / 7);
            Map<String, String> finalAttrMap = commonAttrMap;
            // skip compress flag
            bodyBuffer.get();
            int bodyBufLen = bodyBuffer.capacity() - 1;
            while (bodyBufLen > 0) {
                // get single message length
                int singleMsgLen = bodyBuffer.getInt();
                if (singleMsgLen <= 0 || singleMsgLen > bodyBufLen) {
                    return;
                }
                // re-frame as [compress=0][int len][payload]
                byte[] record = new byte[1 + 4 + singleMsgLen];
                record[0] = 0;
                record[1] = (byte) ((singleMsgLen >> 24) & 0xFF);
                record[2] = (byte) ((singleMsgLen >> 16) & 0xFF);
                record[3] = (byte) ((singleMsgLen >> 8) & 0xFF);
                record[4] = (byte) (singleMsgLen & 0xFF);
                bodyBuffer.get(record, 1 + 4, singleMsgLen);
                // get single attribute length
                int singleAttrLen = bodyBuffer.getInt();
                if (singleAttrLen <= 0 || singleAttrLen > bodyBufLen) {
                    return;
                }
                byte[] attrBuf = new byte[singleAttrLen];
                bodyBuffer.get(attrBuf, 0, singleAttrLen);
                String attrBufStr = new String(attrBuf);
                // common attributes override per-record ones on key clash
                finalAttrMap = new HashMap<String, String>(MAP_SPLITTER.split(attrBufStr));
                finalAttrMap.putAll(commonAttrMap);
                DataByteBuffer inputBuffer = attr2Rawdata.get(MAP_JOINER.join(finalAttrMap));
                if (inputBuffer == null) {
                    // first record for this key keeps the compress flag byte
                    inputBuffer = new DataByteBuffer(0,
                            new DataOutputBuffer(singleMsgLen + 4 + 1));
                    attr2Rawdata.put(MAP_JOINER.join(finalAttrMap), inputBuffer);
                    inputBuffer.inoutBuffer.write(record, 0, singleMsgLen + 4 + 1);
                } else {
                    inputBuffer.inoutBuffer.write(record, 1, singleMsgLen + 4);
                }
                // 8 = two int length prefixes consumed above
                bodyBufLen = bodyBufLen - singleMsgLen - singleAttrLen - 8;
            }
            // sync data
            for (String attr : attr2Rawdata.keySet()) {
                DataByteBuffer data = attr2Rawdata.get(attr);
                data.syncByteBuffer();
            }
        }
    }
private void parse() throws IOException {
if (parsed) {
return;
}
if (version.intValue() < Version.v3.intValue()) {
parseDefault();
} else if (version.intValue() == Version.v3.intValue()) {
parseMixAttr();
} else {
parseBinMsg();
}
parsed = true;
}
private static Version getMagic(ByteBuffer buffer) {
// #lizard forgives
byte[] array = buffer.array();
if (buffer.remaining() < 4) {
return Version.vn;
}
int pos = buffer.position();
int rem = buffer.remaining();
if (array[pos] == MAGIC1[0] && array[pos + 1] == MAGIC1[1]
&& array[pos + rem - 2] == MAGIC1[0]
&& array[pos + rem - 1] == MAGIC1[1]) {
return Version.v1;
}
if (array[pos] == MAGIC2[0] && array[pos + 1] == MAGIC2[1]
&& array[pos + rem - 2] == MAGIC2[0]
&& array[pos + rem - 1] == MAGIC2[1]) {
return Version.v2;
}
if (array[pos] == MAGIC3[0] && array[pos + 1] == MAGIC3[1]
&& array[pos + rem - 2] == MAGIC3[0]
&& array[pos + rem - 1] == MAGIC3[1]) {
return Version.v3;
}
if (array[pos] == MAGIC4[0] && array[pos + 1] == MAGIC4[1]
&& array[pos + rem - 2] == MAGIC4[0]
&& array[pos + rem - 1] == MAGIC4[1]) {
return Version.v4;
}
if (array[pos] == MAGIC0[0] && array[pos + 1] == MAGIC0[1]
&& array[pos + rem - 2] == MAGIC0[0]
&& array[pos + rem - 1] == MAGIC0[1]) {
return Version.v0;
}
return Version.vn;
}
    public static InLongMsg parseFrom(byte[] data) {
        return parseFrom(ByteBuffer.wrap(data));
    }
    // Factory for parse mode; returns null when the MAGIC is unknown or the
    // header cannot be read.
    public static InLongMsg parseFrom(ByteBuffer buffer) {
        Version magic = getMagic(buffer);
        if (magic == Version.vn) {
            return null;
        }
        try {
            return new InLongMsg(buffer, magic);
        } catch (IOException e) {
            return null;
        }
    }
    // Lazily parses the body; IOExceptions are deliberately swallowed here
    // and surface later as missing attribute data.
    private void makeSureParsed() {
        if (!parsed) {
            try {
                parse();
            } catch (IOException e) {
                //
            }
        }
    }
    // Attribute strings found in the parsed message.
    public Set<String> getAttrs() {
        checkMode(false);
        makeSureParsed();
        return this.attr2Rawdata.keySet();
    }
    // Copy of the raw payload bytes for one attribute.
    public byte[] getRawData(String attr) {
        checkMode(false);
        makeSureParsed();
        ByteBuffer buffer = getRawDataBuffer(attr);
        byte[] data = new byte[buffer.remaining()];
        System.arraycopy(buffer.array(), buffer.position(), data, 0,
                buffer.remaining());
        return data;
    }
    // Zero-copy view of the raw payload for one attribute.
    public ByteBuffer getRawDataBuffer(String attr) {
        checkMode(false);
        makeSureParsed();
        return this.attr2Rawdata.get(attr).buffer;
    }
    public Iterator<byte[]> getIterator(String attr) {
        checkMode(false);
        makeSureParsed();
        return getIterator(this.attr2Rawdata.get(attr).buffer);
    }
    public static Iterator<byte[]> getIterator(byte[] rawdata) {
        return getIterator(ByteBuffer.wrap(rawdata));
    }
/**
* getIterator
* @param rawdata
* @return
*/
public static Iterator<byte[]> getIterator(ByteBuffer rawdata) {
try {
final DataInputBuffer input = new DataInputBuffer();
byte[] array = rawdata.array();
int pos = rawdata.position();
int rem = rawdata.remaining() - 1;
int compress = array[pos];
if (compress == 1) {
byte[] uncompressdata = new byte[Snappy.uncompressedLength(
array, pos + 1, rem)];
int len = Snappy.uncompress(array, pos + 1, rem,
uncompressdata, 0);
input.reset(uncompressdata, len);
} else {
input.reset(array, pos + 1, rem);
}
return new Iterator<byte[]>() {
@Override
public boolean hasNext() {
try {
return input.available() > 0;
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
@Override
public byte[] next() {
try {
int len;
len = input.readInt();
byte[] res = new byte[len];
input.read(res);
return res;
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
@Override
public void remove() {
this.next();
}
};
} catch (IOException e) {
e.printStackTrace();
return null;
}
}
    public static Iterator<ByteBuffer> getIteratorBuffer(byte[] rawdata) {
        return getIteratorBuffer(ByteBuffer.wrap(rawdata));
    }
    // Zero-copy record iterator for one attribute of a parsed message.
    public Iterator<ByteBuffer> getIteratorBuffer(String attr) {
        checkMode(false);
        makeSureParsed();
        return getIteratorBuffer(this.attr2Rawdata.get(attr).buffer);
    }
public static Iterator<ByteBuffer> getIteratorBuffer(ByteBuffer rawdata) {
try {
final DataInputBuffer input = new DataInputBuffer();
byte[] array = rawdata.array();
int pos = rawdata.position();
int rem = rawdata.remaining() - 1;
int compress = array[pos];
if (compress == 1) {
byte[] uncompressdata = new byte[Snappy.uncompressedLength(
array, pos + 1, rem)];
int len = Snappy.uncompress(array, pos + 1, rem,
uncompressdata, 0);
input.reset(uncompressdata, len);
} else {
input.reset(array, pos + 1, rem);
}
final byte[] uncompressdata = input.getData();
return new Iterator<ByteBuffer>() {
@Override
public boolean hasNext() {
try {
return input.available() > 0;
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
@Override
public ByteBuffer next() {
try {
int len = input.readInt();
int pos = input.getPosition();
input.skip(len);
return ByteBuffer.wrap(uncompressdata, pos, len);
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
@Override
public void remove() {
this.next();
}
};
} catch (IOException e) {
e.printStackTrace();
return null;
}
}
    // Create time in milliseconds (parse mode: taken from the message header).
    public long getCreatetime() {
        return createtime;
    }
    // Number of attribute sections read from the parsed header.
    public int getAttrCount() {
        checkMode(false);
        return attrcnt;
    }
    // v4 only: whether numeric group/stream ids are used (parse mode).
    public boolean isNumGroupId() {
        checkMode(false);
        return isNumGroupId;
    }
    public boolean isSupportLF() {
        return isSupportLF;
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.util.bkd;
import java.io.IOException;
import java.util.Arrays;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.search.AbstractDocIdSetIterator;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.MathUtil;
/**
 * Handles reading a block KD-tree in byte[] space previously written with {@link BKDWriter}.
 *
 * @lucene.experimental
 */
public class BKDReader extends PointValues {
  // Tree geometry: dims, index dims, bytes per dim, max points per leaf.
  final BKDConfig config;
  final int numLeaves;
  // Data file input holding the leaf blocks.
  final IndexInput in;
  final byte[] minPackedValue;
  final byte[] maxPackedValue;
  final long pointCount;
  final int docCount;
  final int version;
  // Smallest (left-most) leaf block file pointer.
  final long minLeafBlockFP;
  // Location and size of the packed inner-node index within indexIn.
  private final long indexStartPointer;
  private final int numIndexBytes;
  private final IndexInput indexIn;
  // if true, the tree is a legacy balanced tree
  private final boolean isTreeBalanced;
public BKDReader(IndexInput metaIn, IndexInput indexIn, IndexInput dataIn) throws IOException {
version =
CodecUtil.checkHeader(
metaIn, BKDWriter.CODEC_NAME, BKDWriter.VERSION_START, BKDWriter.VERSION_CURRENT);
final int numDims = metaIn.readVInt();
final int numIndexDims;
if (version >= BKDWriter.VERSION_SELECTIVE_INDEXING) {
numIndexDims = metaIn.readVInt();
} else {
numIndexDims = numDims;
}
final int maxPointsInLeafNode = metaIn.readVInt();
final int bytesPerDim = metaIn.readVInt();
config = BKDConfig.of(numDims, numIndexDims, bytesPerDim, maxPointsInLeafNode);
// Read index:
numLeaves = metaIn.readVInt();
assert numLeaves > 0;
byte[] minPackedValue = new byte[config.packedIndexBytesLength()];
byte[] maxPackedValue = new byte[config.packedIndexBytesLength()];
metaIn.readBytes(minPackedValue, 0, config.packedIndexBytesLength());
metaIn.readBytes(maxPackedValue, 0, config.packedIndexBytesLength());
final ArrayUtil.ByteArrayComparator comparator =
ArrayUtil.getUnsignedComparator(config.bytesPerDim());
for (int dim = 0; dim < config.numIndexDims(); dim++) {
if (comparator.compare(
minPackedValue,
dim * config.bytesPerDim(),
maxPackedValue,
dim * config.bytesPerDim())
> 0) {
throw new CorruptIndexException(
"minPackedValue "
+ new BytesRef(minPackedValue)
+ " is > maxPackedValue "
+ new BytesRef(maxPackedValue)
+ " for dim="
+ dim,
metaIn);
}
}
this.minPackedValue = minPackedValue;
if (Arrays.equals(maxPackedValue, minPackedValue)) {
// save heap for edge case of only a single value
this.maxPackedValue = minPackedValue;
} else {
this.maxPackedValue = maxPackedValue;
}
pointCount = metaIn.readVLong();
docCount = metaIn.readVInt();
numIndexBytes = metaIn.readVInt();
if (version >= BKDWriter.VERSION_META_FILE) {
minLeafBlockFP = metaIn.readLong();
indexStartPointer = metaIn.readLong();
} else {
indexStartPointer = indexIn.getFilePointer();
minLeafBlockFP = indexIn.readVLong();
indexIn.seek(indexStartPointer);
}
this.indexIn = indexIn;
this.in = dataIn;
// for only one leaf, balanced and unbalanced trees can be handled the same way
// we set it to unbalanced.
this.isTreeBalanced = numLeaves != 1 && isTreeBalanced();
}
  // Detects legacy (pre-8.6) balanced trees. Modern trees are always
  // unbalanced; for old single-dim trees we navigate to the right-most leaf
  // and compare its point count against what an unbalanced layout would hold.
  private boolean isTreeBalanced() throws IOException {
    if (version >= BKDWriter.VERSION_META_FILE) {
      // since lucene 8.6 all trees are unbalanced.
      return false;
    }
    if (config.numDims() > 1) {
      // high dimensional tree in pre-8.6 indices are balanced.
      assert 1 << MathUtil.log(numLeaves, 2) == numLeaves;
      return true;
    }
    if (1 << MathUtil.log(numLeaves, 2) != numLeaves) {
      // if we don't have enough leaves to fill the last level then it is unbalanced
      return false;
    }
    // count of the last node for unbalanced trees
    final int lastLeafNodePointCount = Math.toIntExact(pointCount % config.maxPointsInLeafNode());
    // navigate to last node
    PointTree pointTree = getPointTree();
    do {
      while (pointTree.moveToSibling()) {}
    } while (pointTree.moveToChild());
    // count number of docs in the node
    final int[] count = new int[] {0};
    pointTree.visitDocIDs(
        new IntersectVisitor() {
          @Override
          public void visit(int docID) {
            count[0]++;
          }
          @Override
          public void visit(DocIdSetIterator iterator) throws IOException {
            int docID;
            while ((docID = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
              visit(docID);
            }
          }
          @Override
          public void visit(IntsRef ref) {
            count[0] += ref.length;
          }
          @Override
          public void visit(int docID, byte[] packedValue) {
            // visitDocIDs never supplies values, only doc ids
            throw new AssertionError();
          }
          @Override
          public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
            // never called for a pure doc-id visit
            throw new AssertionError();
          }
        });
    // a mismatch means the last leaf is full, i.e. the tree is balanced
    return count[0] != lastLeafNodePointCount;
  }
  // Creates a fresh navigable view over the packed index; each call clones
  // the data input so views can be used independently.
  @Override
  public PointTree getPointTree() throws IOException {
    return new BKDPointTree(
        indexIn.slice("packedIndex", indexStartPointer, numIndexBytes),
        this.in.clone(),
        config,
        numLeaves,
        version,
        pointCount,
        minPackedValue,
        maxPackedValue,
        isTreeBalanced);
  }
/**
 * Cursor over the packed BKD index. Nodes are numbered heap-style (children of node n are
 * 2n and 2n+1, as seen in pushLeft/pushRight); per-level stacks record the state of the
 * current descent path so that moving back to the parent can restore bounds cheaply.
 */
private static class BKDPointTree implements PointTree {
  // current node id in the implicit heap numbering
  private int nodeID;
  // during clone, the node root can be different to 1
  private final int nodeRoot;
  // level is 1-based so that we can do level-1 w/o checking each time:
  private int level;
  // used to read the packed tree off-heap
  private final IndexInput innerNodes;
  // used to read the packed leaves off-heap
  private final IndexInput leafNodes;
  // holds the minimum (left most) leaf block file pointer for each level we've recursed to:
  private final long[] leafBlockFPStack;
  // holds the address, in the off-heap index, after reading the node data of each level:
  private final int[] readNodeDataPositions;
  // holds the address, in the off-heap index, of the right-node of each level:
  private final int[] rightNodePositions;
  // holds the splitDim position for each level:
  private final int[] splitDimsPos;
  // true if the per-dim delta we read for the node at this level is a negative offset vs. the
  // last split on this dim; this is a packed
  // 2D array, i.e. to access array[level][dim] you read from negativeDeltas[level*numDims+dim].
  // this will be true if the last time we
  // split on this dimension, we next pushed to the left sub-tree:
  private final boolean[] negativeDeltas;
  // holds the packed per-level split values
  private final byte[][] splitValuesStack;
  // holds the min / max value of the current node.
  private final byte[] minPackedValue, maxPackedValue;
  // holds the previous value of the split dimension
  private final byte[][] splitDimValueStack;
  // tree parameters
  private final BKDConfig config;
  // number of leaves
  private final int leafNodeOffset;
  // version of the index
  private final int version;
  // total number of points
  final long pointCount;
  // last node might not be fully populated
  private final int lastLeafNodePointCount;
  // right most leaf node ID
  private final int rightMostLeafNode;
  // helper objects for reading doc values
  private final byte[] scratchDataPackedValue,
      scratchMinIndexPackedValue,
      scratchMaxIndexPackedValue;
  private final int[] commonPrefixLengths;
  private final BKDReaderDocIDSetIterator scratchIterator;
  private final DocIdsWriter docIdsWriter;
  // if true the tree is balanced, otherwise unbalanced
  private final boolean isTreeBalanced;
  // reusable ref handed to visitors; offset is assumed to stay 0 everywhere it is filled
  private final IntsRef scratchIntsRef = new IntsRef();

  {
    assert scratchIntsRef.offset == 0;
  }
/**
 * Creates a cursor positioned at the root (nodeID == 1, level == 1), allocating fresh
 * scratch buffers, and immediately reads the root node's data.
 */
private BKDPointTree(
    IndexInput innerNodes,
    IndexInput leafNodes,
    BKDConfig config,
    int numLeaves,
    int version,
    long pointCount,
    byte[] minPackedValue,
    byte[] maxPackedValue,
    boolean isTreeBalanced)
    throws IOException {
  this(
      innerNodes,
      leafNodes,
      config,
      numLeaves,
      version,
      pointCount,
      1,
      1,
      minPackedValue,
      maxPackedValue,
      new BKDReaderDocIDSetIterator(config.maxPointsInLeafNode(), version),
      new byte[config.packedBytesLength()],
      new byte[config.packedIndexBytesLength()],
      new byte[config.packedIndexBytesLength()],
      new int[config.numDims()],
      isTreeBalanced);
  // read root node
  readNodeData(false);
}
/**
 * Full constructor, also used by {@link #clone()} so the root may differ from 1. Bounds are
 * defensively copied (they are mutated while navigating); scratch buffers are shared between
 * clones to avoid per-clone allocations.
 */
private BKDPointTree(
    IndexInput innerNodes,
    IndexInput leafNodes,
    BKDConfig config,
    int numLeaves,
    int version,
    long pointCount,
    int nodeID,
    int level,
    byte[] minPackedValue,
    byte[] maxPackedValue,
    BKDReaderDocIDSetIterator scratchIterator,
    byte[] scratchDataPackedValue,
    byte[] scratchMinIndexPackedValue,
    byte[] scratchMaxIndexPackedValue,
    int[] commonPrefixLengths,
    boolean isTreeBalanced) {
  this.config = config;
  this.version = version;
  this.nodeID = nodeID;
  this.nodeRoot = nodeID;
  this.level = level;
  this.isTreeBalanced = isTreeBalanced;
  leafNodeOffset = numLeaves;
  this.innerNodes = innerNodes;
  this.leafNodes = leafNodes;
  this.minPackedValue = minPackedValue.clone();
  this.maxPackedValue = maxPackedValue.clone();
  // stack arrays that keep information at different levels
  int treeDepth = getTreeDepth(numLeaves);
  splitDimValueStack = new byte[treeDepth][];
  splitValuesStack = new byte[treeDepth][];
  splitValuesStack[0] = new byte[config.packedIndexBytesLength()];
  leafBlockFPStack = new long[treeDepth + 1];
  readNodeDataPositions = new int[treeDepth + 1];
  rightNodePositions = new int[treeDepth];
  splitDimsPos = new int[treeDepth];
  negativeDeltas = new boolean[config.numIndexDims() * treeDepth];
  // information about the unbalance of the tree so we can report the exact size below a node
  this.pointCount = pointCount;
  rightMostLeafNode = (1 << treeDepth - 1) - 1;
  int lastLeafNodePointCount = Math.toIntExact(pointCount % config.maxPointsInLeafNode());
  // a remainder of 0 means the last leaf is exactly full
  this.lastLeafNodePointCount =
      lastLeafNodePointCount == 0 ? config.maxPointsInLeafNode() : lastLeafNodePointCount;
  // scratch objects, reused between clones so NN search are not creating those objects
  // in every clone.
  this.scratchIterator = scratchIterator;
  this.commonPrefixLengths = commonPrefixLengths;
  this.scratchDataPackedValue = scratchDataPackedValue;
  this.scratchMinIndexPackedValue = scratchMinIndexPackedValue;
  this.scratchMaxIndexPackedValue = scratchMaxIndexPackedValue;
  this.docIdsWriter = scratchIterator.docIdsWriter;
}
@Override
public PointTree clone() {
  // The clone is rooted at the current node: it shares the scratch buffers but gets its
  // own IndexInput clones, so both cursors can navigate independently.
  BKDPointTree index =
      new BKDPointTree(
          innerNodes.clone(),
          leafNodes.clone(),
          config,
          leafNodeOffset,
          version,
          pointCount,
          nodeID,
          level,
          minPackedValue,
          maxPackedValue,
          scratchIterator,
          scratchDataPackedValue,
          scratchMinIndexPackedValue,
          scratchMaxIndexPackedValue,
          commonPrefixLengths,
          isTreeBalanced);
  // copy the per-level state of the current level only; deeper/shallower levels are
  // unreachable from the clone's root without re-reading node data.
  index.leafBlockFPStack[index.level] = leafBlockFPStack[level];
  if (isLeafNode() == false) {
    // copy node data
    index.rightNodePositions[index.level] = rightNodePositions[level];
    index.readNodeDataPositions[index.level] = readNodeDataPositions[level];
    index.splitValuesStack[index.level] = splitValuesStack[level].clone();
    System.arraycopy(
        negativeDeltas,
        level * config.numIndexDims(),
        index.negativeDeltas,
        level * config.numIndexDims(),
        config.numIndexDims());
    index.splitDimsPos[level] = splitDimsPos[level];
  }
  return index;
}
@Override
public byte[] getMinPackedValue() {
  // exposed without copying; mutated in place while navigating, callers must not modify it
  return minPackedValue;
}
@Override
public byte[] getMaxPackedValue() {
  // exposed without copying; mutated in place while navigating, callers must not modify it
  return maxPackedValue;
}
@Override
public boolean moveToChild() throws IOException {
  // Descend into the left child: rewind the inner-nodes input to this level's recorded
  // position, narrow the max bound along the split dimension, then read the child's data.
  if (isLeafNode()) {
    return false;
  }
  resetNodeDataPosition();
  pushBoundsLeft();
  pushLeft();
  return true;
}
/** Rewinds {@code innerNodes} to the position recorded right after this level's node data. */
private void resetNodeDataPosition() throws IOException {
  // move position of the inner nodes index to visit the first child
  assert readNodeDataPositions[level] <= innerNodes.getFilePointer();
  innerNodes.seek(readNodeDataPositions[level]);
}
/**
 * Narrows {@code maxPackedValue} down to this level's split value before descending left,
 * first saving the overwritten bytes so {@link #popBounds} can restore them.
 */
private void pushBoundsLeft() {
  final int splitDimPos = splitDimsPos[level];
  if (splitDimValueStack[level] == null) {
    splitDimValueStack[level] = new byte[config.bytesPerDim()];
  }
  // save the dimension we are going to change
  System.arraycopy(
      maxPackedValue, splitDimPos, splitDimValueStack[level], 0, config.bytesPerDim());
  assert ArrayUtil.getUnsignedComparator(config.bytesPerDim())
          .compare(maxPackedValue, splitDimPos, splitValuesStack[level], splitDimPos)
      >= 0
      : "config.bytesPerDim()="
          + config.bytesPerDim()
          + " splitDimPos="
          + splitDimsPos[level]
          + " config.numIndexDims()="
          + config.numIndexDims()
          + " config.numDims()="
          + config.numDims();
  // add the split dim value:
  System.arraycopy(
      splitValuesStack[level], splitDimPos, maxPackedValue, splitDimPos, config.bytesPerDim());
}
/** Steps to the left child (heap child 2n) and decodes its node data. */
private void pushLeft() throws IOException {
  // both updates must happen before reading node data; their order is irrelevant
  level += 1;
  nodeID = 2 * nodeID;
  readNodeData(true);
}
/**
 * Raises {@code minPackedValue} up to this level's split value before descending right,
 * saving the overwritten bytes so {@link #popBounds} can restore them. Requires that the
 * left child was visited first (the save slot must already exist).
 */
private void pushBoundsRight() {
  final int splitDimPos = splitDimsPos[level];
  // we should have already visited the left node
  assert splitDimValueStack[level] != null;
  // save the dimension we are going to change
  System.arraycopy(
      minPackedValue, splitDimPos, splitDimValueStack[level], 0, config.bytesPerDim());
  assert ArrayUtil.getUnsignedComparator(config.bytesPerDim())
          .compare(minPackedValue, splitDimPos, splitValuesStack[level], splitDimPos)
      <= 0
      : "config.bytesPerDim()="
          + config.bytesPerDim()
          + " splitDimPos="
          + splitDimsPos[level]
          + " config.numIndexDims()="
          + config.numIndexDims()
          + " config.numDims()="
          + config.numDims();
  // add the split dim value:
  System.arraycopy(
      splitValuesStack[level], splitDimPos, minPackedValue, splitDimPos, config.bytesPerDim());
}
/** Steps to the right child (heap child 2n+1): seeks to its recorded position and decodes it. */
private void pushRight() throws IOException {
  final int nodePosition = rightNodePositions[level];
  assert nodePosition >= innerNodes.getFilePointer()
      : "nodePosition = " + nodePosition + " < currentPosition=" + innerNodes.getFilePointer();
  innerNodes.seek(nodePosition);
  nodeID = 2 * nodeID + 1;
  level++;
  readNodeData(false);
}
@Override
public boolean moveToSibling() throws IOException {
  // Only a left child has a right sibling to move to; the root has no sibling at all.
  if (isLeftNode() == false || isRootNode()) {
    return false;
  }
  // Go up one level, undo the max-bound narrowing done when descending left, then take the
  // right branch with the matching min-bound adjustment.
  pop();
  popBounds(maxPackedValue);
  pushBoundsRight();
  pushRight();
  assert nodeExists();
  return true;
}
/** Ascends one level; the parent of heap node n is n/2. */
private void pop() {
  level -= 1;
  nodeID = nodeID / 2;
}
/** Restores into {@code packedValue} the split-dimension bytes saved before descending. */
private void popBounds(byte[] packedValue) {
  // restore the split dimension
  System.arraycopy(
      splitDimValueStack[level], 0, packedValue, splitDimsPos[level], config.bytesPerDim());
}
@Override
public boolean moveToParent() {
  // The cursor cannot climb above the node it was rooted at.
  if (isRootNode()) {
    return false;
  }
  // Coming from a left child we had narrowed the max bound; from a right child, the min.
  final byte[] boundToRestore;
  if (isLeftNode()) {
    boundToRestore = maxPackedValue;
  } else {
    boundToRestore = minPackedValue;
  }
  pop();
  popBounds(boundToRestore);
  return true;
}
// true when the cursor sits on the node it was created (or cloned) at
private boolean isRootNode() {
  return nodeID == nodeRoot;
}
// in the heap numbering, left children have even ids (2n) and right children odd (2n+1)
private boolean isLeftNode() {
  return nodeID % 2 == 0;
}
// leaf ids start at leafNodeOffset (== numLeaves); smaller ids are inner nodes
private boolean isLeafNode() {
  return nodeID >= leafNodeOffset;
}
// true when the current leaf id falls within the valid range of numLeaves leaves
private boolean nodeExists() {
  return nodeID - leafNodeOffset < leafNodeOffset;
}
/** File pointer of the current leaf's data block. Only valid after pushLeft or pushRight, not pop! */
private long getLeafBlockFP() {
  assert isLeafNode() : "nodeID=" + nodeID + " is not a leaf";
  return leafBlockFPStack[level];
}
@Override
public long size() {
  // Number of points below the current node, computed from the number of leaves spanned.
  // Find the left-most and right-most leaves reachable from this node:
  int leftMostLeafNode = nodeID;
  while (leftMostLeafNode < leafNodeOffset) {
    leftMostLeafNode = leftMostLeafNode * 2;
  }
  int rightMostLeafNode = nodeID;
  while (rightMostLeafNode < leafNodeOffset) {
    rightMostLeafNode = rightMostLeafNode * 2 + 1;
  }
  final int numLeaves;
  if (rightMostLeafNode >= leftMostLeafNode) {
    // both are on the same level
    numLeaves = rightMostLeafNode - leftMostLeafNode + 1;
  } else {
    // left is one level deeper than right
    numLeaves = rightMostLeafNode - leftMostLeafNode + 1 + leafNodeOffset;
  }
  assert numLeaves == getNumLeavesSlow(nodeID) : numLeaves + " " + getNumLeavesSlow(nodeID);
  if (isTreeBalanced) {
    // before lucene 8.6, trees might have been constructed as fully balanced trees.
    return sizeFromBalancedTree(leftMostLeafNode, rightMostLeafNode);
  }
  // size for an unbalanced tree: every leaf is full except possibly the right-most one.
  return rightMostLeafNode == this.rightMostLeafNode
      ? (long) (numLeaves - 1) * config.maxPointsInLeafNode() + lastLeafNodePointCount
      : (long) numLeaves * config.maxPointsInLeafNode();
}
/**
 * Size computation for pre-8.6 fully balanced trees, where leaves hold either
 * maxPointsInLeafNode or maxPointsInLeafNode - 1 points depending on their position.
 */
private long sizeFromBalancedTree(int leftMostLeafNode, int rightMostLeafNode) {
  // number of points that need to be distributed between leaves, one per leaf
  final int extraPoints =
      Math.toIntExact(((long) config.maxPointsInLeafNode() * this.leafNodeOffset) - pointCount);
  assert extraPoints < leafNodeOffset : "point excess should be lower than leafNodeOffset";
  // offset where we stop adding one point to the leaves
  final int nodeOffset = leafNodeOffset - extraPoints;
  long count = 0;
  for (int node = leftMostLeafNode; node <= rightMostLeafNode; node++) {
    // offsetPosition provides which extra point will be added to this node
    if (balanceTreeNodePosition(0, leafNodeOffset, node - leafNodeOffset, 0, 0) < nodeOffset) {
      count += config.maxPointsInLeafNode();
    } else {
      count += config.maxPointsInLeafNode() - 1;
    }
  }
  return count;
}
/**
 * Maps a leaf's index within [minNode, maxNode) to its position in the balanced-tree point
 * distribution. Iterative binary descent; {@code position} and {@code level} act as the
 * accumulator and the current depth, exactly as in the recursive formulation.
 */
private int balanceTreeNodePosition(
    int minNode, int maxNode, int node, int position, int level) {
  while (maxNode - minNode != 1) {
    final int mid = (minNode + maxNode + 1) >>> 1;
    if (node < mid) {
      maxNode = mid;
    } else {
      minNode = mid;
      position += 1 << level;
    }
    level++;
  }
  return position;
}
@Override
public void visitDocIDs(PointValues.IntersectVisitor visitor) throws IOException {
  // Visit the doc ids (without values) of every leaf below the current node.
  resetNodeDataPosition();
  addAll(visitor, false);
}
/**
 * Recursively feeds every doc id below the current node to {@code visitor}. {@code grown}
 * records whether visitor.grow() has already been called for this whole subtree, so it is
 * invoked at most once on the way down.
 */
public void addAll(PointValues.IntersectVisitor visitor, boolean grown) throws IOException {
  if (grown == false) {
    final long size = size();
    if (size <= Integer.MAX_VALUE) {
      visitor.grow((int) size);
      grown = true;
    }
  }
  if (isLeafNode()) {
    // Leaf node
    leafNodes.seek(getLeafBlockFP());
    // How many points are stored in this leaf cell:
    int count = leafNodes.readVInt();
    // No need to call grow(), it has been called up-front
    // Borrow scratchIterator.docIds as decoding buffer
    docIdsWriter.readInts(leafNodes, count, visitor, scratchIterator.docIDs);
  } else {
    pushLeft();
    addAll(visitor, grown);
    pop();
    pushRight();
    addAll(visitor, grown);
    pop();
  }
}
@Override
public void visitDocValues(PointValues.IntersectVisitor visitor) throws IOException {
  // Visit doc ids together with their packed values for every leaf below the current node.
  resetNodeDataPosition();
  visitLeavesOneByOne(visitor);
}
/** Depth-first traversal that decodes each leaf block in turn and hands it to the visitor. */
private void visitLeavesOneByOne(PointValues.IntersectVisitor visitor) throws IOException {
  if (isLeafNode()) {
    // Leaf node
    visitDocValues(visitor, getLeafBlockFP());
  } else {
    pushLeft();
    visitLeavesOneByOne(visitor);
    pop();
    pushRight();
    visitLeavesOneByOne(visitor);
    pop();
  }
}
/**
 * Decodes the leaf block at file pointer {@code fp} and feeds its doc ids and values to the
 * visitor, dispatching on the index version's leaf encoding.
 */
private void visitDocValues(PointValues.IntersectVisitor visitor, long fp) throws IOException {
  // Leaf node; scan and filter all points in this block:
  int count = readDocIDs(leafNodes, fp, scratchIterator);
  if (version >= BKDWriter.VERSION_LOW_CARDINALITY_LEAVES) {
    visitDocValuesWithCardinality(
        commonPrefixLengths,
        scratchDataPackedValue,
        scratchMinIndexPackedValue,
        scratchMaxIndexPackedValue,
        leafNodes,
        scratchIterator,
        count,
        visitor);
  } else {
    visitDocValuesNoCardinality(
        commonPrefixLengths,
        scratchDataPackedValue,
        scratchMinIndexPackedValue,
        scratchMaxIndexPackedValue,
        leafNodes,
        scratchIterator,
        count,
        visitor);
  }
}
/** Reads the doc ids of the leaf block at {@code blockFP} into {@code iterator.docIDs} and returns the count. */
private int readDocIDs(IndexInput in, long blockFP, BKDReaderDocIDSetIterator iterator)
    throws IOException {
  in.seek(blockFP);
  // How many points are stored in this leaf cell:
  int count = in.readVInt();
  docIdsWriter.readInts(in, count, iterator.docIDs);
  return count;
}
// for assertions: O(numLeaves) reference count of the leaves below a node
private int getNumLeavesSlow(int node) {
  if (node < leafNodeOffset) {
    // inner node: sum both subtrees
    return getNumLeavesSlow(node * 2) + getNumLeavesSlow(node * 2 + 1);
  }
  // ids in [leafNodeOffset, 2*leafNodeOffset) are leaves; beyond that, nothing
  return node < 2 * leafNodeOffset ? 1 : 0;
}
/**
 * Decodes the packed node data for the node just moved to. The left child's leaf FP is
 * inherited from the parent; the right child's is the parent's plus a stored delta. For
 * inner nodes this also decodes the split dimension and split value (delta-encoded against
 * the previous split on the same dimension) and records the positions needed to later visit
 * both children.
 */
private void readNodeData(boolean isLeft) throws IOException {
  leafBlockFPStack[level] = leafBlockFPStack[level - 1];
  if (isLeft == false) {
    // read leaf block FP delta
    leafBlockFPStack[level] += innerNodes.readVLong();
  }
  if (isLeafNode() == false) {
    // carry the parent's per-dim sign information down one level
    System.arraycopy(
        negativeDeltas,
        (level - 1) * config.numIndexDims(),
        negativeDeltas,
        level * config.numIndexDims(),
        config.numIndexDims());
    negativeDeltas[
        level * config.numIndexDims() + (splitDimsPos[level - 1] / config.bytesPerDim())] =
        isLeft;
    if (splitValuesStack[level] == null) {
      splitValuesStack[level] = splitValuesStack[level - 1].clone();
    } else {
      System.arraycopy(
          splitValuesStack[level - 1],
          0,
          splitValuesStack[level],
          0,
          config.packedIndexBytesLength());
    }
    // read split dim, prefix, firstDiffByteDelta encoded as int:
    int code = innerNodes.readVInt();
    final int splitDim = code % config.numIndexDims();
    splitDimsPos[level] = splitDim * config.bytesPerDim();
    code /= config.numIndexDims();
    final int prefix = code % (1 + config.bytesPerDim());
    final int suffix = config.bytesPerDim() - prefix;
    if (suffix > 0) {
      int firstDiffByteDelta = code / (1 + config.bytesPerDim());
      if (negativeDeltas[level * config.numIndexDims() + splitDim]) {
        firstDiffByteDelta = -firstDiffByteDelta;
      }
      final int startPos = splitDimsPos[level] + prefix;
      final int oldByte = splitValuesStack[level][startPos] & 0xFF;
      splitValuesStack[level][startPos] = (byte) (oldByte + firstDiffByteDelta);
      innerNodes.readBytes(splitValuesStack[level], startPos + 1, suffix - 1);
    } else {
      // our split value is == last split value in this dim, which can happen when there are
      // many duplicate values
    }
    // byte length of the left subtree's node data, so the right child can be located
    final int leftNumBytes;
    if (nodeID * 2 < leafNodeOffset) {
      leftNumBytes = innerNodes.readVInt();
    } else {
      leftNumBytes = 0;
    }
    rightNodePositions[level] = Math.toIntExact(innerNodes.getFilePointer()) + leftNumBytes;
    readNodeDataPositions[level] = Math.toIntExact(innerNodes.getFilePointer());
  }
}
/** Depth of a tree with {@code numLeaves} leaves, counting both inner and leaf levels. */
private int getTreeDepth(int numLeaves) {
  // First +1 because all the non-leave nodes makes another power
  // of 2; e.g. to have a fully balanced tree with 4 leaves you
  // need a depth=3 tree:
  // Second +1 because MathUtil.log computes floor of the logarithm; e.g.
  // with 5 leaves you need a depth=4 tree:
  return MathUtil.log(numLeaves, 2) + 2;
}
/** Leaf decoding for pre-low-cardinality index versions: common prefixes, optional per-leaf bounds, then values. */
private void visitDocValuesNoCardinality(
    int[] commonPrefixLengths,
    byte[] scratchDataPackedValue,
    byte[] scratchMinIndexPackedValue,
    byte[] scratchMaxIndexPackedValue,
    IndexInput in,
    BKDReaderDocIDSetIterator scratchIterator,
    int count,
    PointValues.IntersectVisitor visitor)
    throws IOException {
  readCommonPrefixes(commonPrefixLengths, scratchDataPackedValue, in);
  if (config.numIndexDims() != 1 && version >= BKDWriter.VERSION_LEAF_STORES_BOUNDS) {
    byte[] minPackedValue = scratchMinIndexPackedValue;
    System.arraycopy(
        scratchDataPackedValue, 0, minPackedValue, 0, config.packedIndexBytesLength());
    byte[] maxPackedValue = scratchMaxIndexPackedValue;
    // Copy common prefixes before reading adjusted box
    System.arraycopy(minPackedValue, 0, maxPackedValue, 0, config.packedIndexBytesLength());
    readMinMax(commonPrefixLengths, minPackedValue, maxPackedValue, in);
    // The index gives us range of values for each dimension, but the actual range of values
    // might be much more narrow than what the index told us, so we double check the relation
    // here, which is cheap yet might help figure out that the block either entirely matches
    // or does not match at all. This is especially more likely in the case that there are
    // multiple dimensions that have correlation, ie. splitting on one dimension also
    // significantly changes the range of values in another dimension.
    PointValues.Relation r = visitor.compare(minPackedValue, maxPackedValue);
    if (r == PointValues.Relation.CELL_OUTSIDE_QUERY) {
      return;
    }
    visitor.grow(count);
    if (r == PointValues.Relation.CELL_INSIDE_QUERY) {
      // whole block matches: hand over doc ids without decoding any values
      scratchIntsRef.ints = scratchIterator.docIDs;
      scratchIntsRef.length = count;
      visitor.visit(scratchIntsRef);
      return;
    }
  } else {
    visitor.grow(count);
  }
  int compressedDim = readCompressedDim(in);
  if (compressedDim == -1) {
    visitUniqueRawDocValues(scratchDataPackedValue, scratchIterator, count, visitor);
  } else {
    visitCompressedDocValues(
        commonPrefixLengths,
        scratchDataPackedValue,
        in,
        scratchIterator,
        count,
        visitor,
        compressedDim);
  }
}
/** Leaf decoding for low-cardinality-aware versions: dispatches on the leaf's cardinality marker. */
private void visitDocValuesWithCardinality(
    int[] commonPrefixLengths,
    byte[] scratchDataPackedValue,
    byte[] scratchMinIndexPackedValue,
    byte[] scratchMaxIndexPackedValue,
    IndexInput in,
    BKDReaderDocIDSetIterator scratchIterator,
    int count,
    PointValues.IntersectVisitor visitor)
    throws IOException {
  readCommonPrefixes(commonPrefixLengths, scratchDataPackedValue, in);
  int compressedDim = readCompressedDim(in);
  if (compressedDim == -1) {
    // all values are the same
    visitor.grow(count);
    visitUniqueRawDocValues(scratchDataPackedValue, scratchIterator, count, visitor);
  } else {
    if (config.numIndexDims() != 1) {
      byte[] minPackedValue = scratchMinIndexPackedValue;
      System.arraycopy(
          scratchDataPackedValue, 0, minPackedValue, 0, config.packedIndexBytesLength());
      byte[] maxPackedValue = scratchMaxIndexPackedValue;
      // Copy common prefixes before reading adjusted box
      System.arraycopy(minPackedValue, 0, maxPackedValue, 0, config.packedIndexBytesLength());
      readMinMax(commonPrefixLengths, minPackedValue, maxPackedValue, in);
      // The index gives us range of values for each dimension, but the actual range of values
      // might be much more narrow than what the index told us, so we double check the relation
      // here, which is cheap yet might help figure out that the block either entirely matches
      // or does not match at all. This is especially more likely in the case that there are
      // multiple dimensions that have correlation, ie. splitting on one dimension also
      // significantly changes the range of values in another dimension.
      PointValues.Relation r = visitor.compare(minPackedValue, maxPackedValue);
      if (r == PointValues.Relation.CELL_OUTSIDE_QUERY) {
        return;
      }
      visitor.grow(count);
      if (r == PointValues.Relation.CELL_INSIDE_QUERY) {
        // whole block matches: hand over doc ids without decoding any values
        scratchIntsRef.ints = scratchIterator.docIDs;
        scratchIntsRef.length = count;
        visitor.visit(scratchIntsRef);
        return;
      }
    } else {
      visitor.grow(count);
    }
    if (compressedDim == -2) {
      // low cardinality values
      visitSparseRawDocValues(
          commonPrefixLengths, scratchDataPackedValue, in, scratchIterator, count, visitor);
    } else {
      // high cardinality
      visitCompressedDocValues(
          commonPrefixLengths,
          scratchDataPackedValue,
          in,
          scratchIterator,
          count,
          visitor,
          compressedDim);
    }
  }
}
/** Reads the per-leaf min/max suffix bytes for every index dimension (prefixes already copied in). */
private void readMinMax(
    int[] commonPrefixLengths, byte[] minPackedValue, byte[] maxPackedValue, IndexInput in)
    throws IOException {
  for (int dim = 0; dim < config.numIndexDims(); dim++) {
    int prefix = commonPrefixLengths[dim];
    in.readBytes(
        minPackedValue, dim * config.bytesPerDim() + prefix, config.bytesPerDim() - prefix);
    in.readBytes(
        maxPackedValue, dim * config.bytesPerDim() + prefix, config.bytesPerDim() - prefix);
  }
}
// read cardinality and point: each run is a (length, value-suffixes) record shared by
// `length` consecutive docs
private void visitSparseRawDocValues(
    int[] commonPrefixLengths,
    byte[] scratchPackedValue,
    IndexInput in,
    BKDReaderDocIDSetIterator scratchIterator,
    int count,
    PointValues.IntersectVisitor visitor)
    throws IOException {
  int i;
  for (i = 0; i < count; ) {
    int length = in.readVInt();
    for (int dim = 0; dim < config.numDims(); dim++) {
      int prefix = commonPrefixLengths[dim];
      in.readBytes(
          scratchPackedValue,
          dim * config.bytesPerDim() + prefix,
          config.bytesPerDim() - prefix);
    }
    scratchIterator.reset(i, length);
    visitor.visit(scratchIterator, scratchPackedValue);
    i += length;
  }
  if (i != count) {
    throw new CorruptIndexException(
        "Sub blocks do not add up to the expected count: " + count + " != " + i, in);
  }
}
// point is under commonPrefix: all docs in the leaf share the single value already decoded
// into scratchPackedValue
private void visitUniqueRawDocValues(
    byte[] scratchPackedValue,
    BKDReaderDocIDSetIterator scratchIterator,
    int count,
    PointValues.IntersectVisitor visitor)
    throws IOException {
  scratchIterator.reset(0, count);
  visitor.visit(scratchIterator, scratchPackedValue);
}
/**
 * Decodes a high-cardinality leaf. Note this bumps commonPrefixLengths[compressedDim] in
 * place; that scratch array is rewritten by readCommonPrefixes before the next leaf.
 */
private void visitCompressedDocValues(
    int[] commonPrefixLengths,
    byte[] scratchPackedValue,
    IndexInput in,
    BKDReaderDocIDSetIterator scratchIterator,
    int count,
    PointValues.IntersectVisitor visitor,
    int compressedDim)
    throws IOException {
  // the byte at `compressedByteOffset` is compressed using run-length compression,
  // other suffix bytes are stored verbatim
  final int compressedByteOffset =
      compressedDim * config.bytesPerDim() + commonPrefixLengths[compressedDim];
  commonPrefixLengths[compressedDim]++;
  int i;
  for (i = 0; i < count; ) {
    scratchPackedValue[compressedByteOffset] = in.readByte();
    final int runLen = Byte.toUnsignedInt(in.readByte());
    for (int j = 0; j < runLen; ++j) {
      for (int dim = 0; dim < config.numDims(); dim++) {
        int prefix = commonPrefixLengths[dim];
        in.readBytes(
            scratchPackedValue,
            dim * config.bytesPerDim() + prefix,
            config.bytesPerDim() - prefix);
      }
      visitor.visit(scratchIterator.docIDs[i + j], scratchPackedValue);
    }
    i += runLen;
  }
  if (i != count) {
    throw new CorruptIndexException(
        "Sub blocks do not add up to the expected count: " + count + " != " + i, in);
  }
}
/**
 * Reads a leaf's cardinality marker byte: -1 means all values in the leaf are identical,
 * -2 means low-cardinality encoding (only legal from VERSION_LOW_CARDINALITY_LEAVES on),
 * otherwise it is the dimension whose first suffix byte is run-length compressed.
 */
private int readCompressedDim(IndexInput in) throws IOException {
  int compressedDim = in.readByte();
  if (compressedDim < -2
      || compressedDim >= config.numDims()
      || (version < BKDWriter.VERSION_LOW_CARDINALITY_LEAVES && compressedDim == -2)) {
    throw new CorruptIndexException("Got compressedDim=" + compressedDim, in);
  }
  return compressedDim;
}
/**
 * Reads, per data dimension, the shared-prefix length and the prefix bytes themselves into
 * {@code scratchPackedValue}; suffix bytes are decoded later per point.
 */
private void readCommonPrefixes(
    int[] commonPrefixLengths, byte[] scratchPackedValue, IndexInput in) throws IOException {
  for (int dim = 0; dim < config.numDims(); dim++) {
    final int prefixLen = in.readVInt();
    commonPrefixLengths[dim] = prefixLen;
    if (prefixLen > 0) {
      in.readBytes(scratchPackedValue, dim * config.bytesPerDim(), prefixLen);
    }
  }
}
@Override
public String toString() {
  // debugging aid: identifies the node the cursor currently sits on
  return "nodeID=" + nodeID;
}
}
@Override
public byte[] getMinPackedValue() {
  // defensive copy: the reader's global minimum must not be mutated by callers
  return minPackedValue.clone();
}
@Override
public byte[] getMaxPackedValue() {
  // defensive copy: the reader's global maximum must not be mutated by callers
  return maxPackedValue.clone();
}
@Override
public int getNumDimensions() throws IOException {
  // total number of data dimensions stored per point
  return config.numDims();
}
@Override
public int getNumIndexDimensions() throws IOException {
  // number of dimensions used to build the index (may be fewer than numDims)
  return config.numIndexDims();
}
@Override
public int getBytesPerDimension() throws IOException {
  // fixed byte width of each dimension's encoded value
  return config.bytesPerDim();
}
@Override
public long size() {
  // total number of indexed points across all leaves
  return pointCount;
}
@Override
public int getDocCount() {
  // number of distinct documents (a doc may contribute several points)
  return docCount;
}
/** Reusable {@link DocIdSetIterator} to handle low cardinality leaves. */
private static class BKDReaderDocIDSetIterator extends AbstractDocIdSetIterator {
  // cursor into the current [offset, offset + length) window of docIDs
  private int idx;
  private int length;
  private int offset;
  // shared decode buffer; also borrowed directly by BKDPointTree
  final int[] docIDs;
  // shared with BKDPointTree via its constructor
  private final DocIdsWriter docIdsWriter;

  public BKDReaderDocIDSetIterator(int maxPointsInLeafNode, int version) {
    this.docIDs = new int[maxPointsInLeafNode];
    this.docIdsWriter = new DocIdsWriter(maxPointsInLeafNode, version);
  }

  /** Re-targets the iterator at the docIDs window [offset, offset + length). */
  private void reset(int offset, int length) {
    this.offset = offset;
    this.length = length;
    assert offset + length <= docIDs.length;
    this.doc = -1;
    this.idx = 0;
  }

  @Override
  public int nextDoc() throws IOException {
    if (idx == length) {
      doc = DocIdSetIterator.NO_MORE_DOCS;
    } else {
      doc = docIDs[offset + idx];
      idx++;
    }
    return doc;
  }

  @Override
  public int advance(int target) throws IOException {
    // linear advance is fine: windows are at most one leaf block long
    return slowAdvance(target);
  }

  @Override
  public long cost() {
    return length;
  }
}
}
// ==== apache/flink: flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/TimestampITCase.java ====
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.streaming.runtime;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.JobStatus;
import org.apache.flink.api.common.eventtime.AscendingTimestampsWatermarks;
import org.apache.flink.api.common.eventtime.NoWatermarksGenerator;
import org.apache.flink.api.common.eventtime.TimestampAssigner;
import org.apache.flink.api.common.eventtime.TimestampAssignerSupplier;
import org.apache.flink.api.common.eventtime.WatermarkGenerator;
import org.apache.flink.api.common.eventtime.WatermarkGeneratorSupplier;
import org.apache.flink.api.common.eventtime.WatermarkOutput;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.client.program.ClusterClient;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.core.execution.SavepointFormatType;
import org.apache.flink.core.testutils.MultiShotLatch;
import org.apache.flink.runtime.checkpoint.CheckpointException;
import org.apache.flink.runtime.checkpoint.CheckpointFailureReason;
import org.apache.flink.runtime.client.JobStatusMessage;
import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.CoMapFunction;
import org.apache.flink.streaming.api.functions.sink.v2.DiscardingSink;
import org.apache.flink.streaming.api.functions.source.legacy.SourceFunction;
import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.test.util.MiniClusterWithClientResource;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.TestLogger;
import org.apache.flink.util.function.SerializableFunction;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import static org.apache.flink.test.checkpointing.SavepointITCase.waitUntilAllTasksAreRunning;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/** Tests for timestamps, watermarks, and event-time sources. */
@SuppressWarnings("serial")
public class TimestampITCase extends TestLogger {
// per-test scratch directory for savepoints
@Rule public TemporaryFolder tmpFolder = new TemporaryFolder();

private static final int NUM_TASK_MANAGERS = 2;
private static final int NUM_TASK_SLOTS = 3;
private static final int PARALLELISM = NUM_TASK_MANAGERS * NUM_TASK_SLOTS;

// this is used in some tests to synchronize
static MultiShotLatch latch;

// shared mini-cluster for all tests in this class
@ClassRule
public static final MiniClusterWithClientResource CLUSTER =
    new MiniClusterWithClientResource(
        new MiniClusterResourceConfiguration.Builder()
            .setConfiguration(getConfiguration())
            .setNumberTaskManagers(NUM_TASK_MANAGERS)
            .setNumberSlotsPerTaskManager(NUM_TASK_SLOTS)
            .build());
/** Builds the mini-cluster configuration, capping managed memory for the test environment. */
private static Configuration getConfiguration() {
    final Configuration configuration = new Configuration();
    configuration.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, MemorySize.parse("12m"));
    return configuration;
}
@Before
public void setupLatch() {
    // ensure that we get a fresh latch for each test
    latch = new MultiShotLatch();
}
/**
 * These check whether custom timestamp emission works at sources and also whether timestamps
 * arrive at operators throughout a topology.
 *
 * <p>This also checks whether watermarks keep propagating if a source closes early.
 *
 * <p>This only uses map to test the workings of watermarks in a complete, running topology. All
 * tasks and stream operators have dedicated tests that test the watermark propagation
 * behaviour.
 */
@Test
public void testWatermarkPropagation() throws Exception {
    final int numWatermarks = 10;
    long initialTime = 0L;
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);
    // source2 emits half as many watermarks as source1 and therefore finishes earlier
    DataStream<Integer> source1 =
            env.addSource(new MyTimestampSource(initialTime, numWatermarks));
    DataStream<Integer> source2 =
            env.addSource(new MyTimestampSource(initialTime, numWatermarks / 2));
    source1.union(source2)
            .map(new IdentityMap())
            .connect(source2)
            .map(new IdentityCoMap())
            .transform("Custom Operator", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
            .sinkTo(new DiscardingSink<>());
    env.execute();
    // verify that all the watermarks arrived at the final custom operator
    for (int i = 0; i < PARALLELISM; i++) {
        // we are only guaranteed to see NUM_WATERMARKS / 2 watermarks because the
        // other source stops emitting after that
        for (int j = 0; j < numWatermarks / 2; j++) {
            if (!CustomOperator.finalWatermarks[i]
                    .get(j)
                    .equals(new Watermark(initialTime + j))) {
                // dump everything seen by this subtask before failing, to ease debugging
                System.err.println("All Watermarks: ");
                for (int k = 0; k <= numWatermarks / 2; k++) {
                    System.err.println(CustomOperator.finalWatermarks[i].get(k));
                }
                fail("Wrong watermark.");
            }
        }
        // after the sources finish, the final MAX_WATERMARK must still be forwarded
        assertEquals(
                Watermark.MAX_WATERMARK,
                CustomOperator.finalWatermarks[i].get(
                        CustomOperator.finalWatermarks[i].size() - 1));
    }
}
/** Checks that a stream unioned with itself still forwards the final MAX_WATERMARK. */
@Test
public void testSelfUnionWatermarkPropagation() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);
    DataStream<Integer> dataStream1 = env.fromData(1, 2, 3);
    dataStream1
            .union(dataStream1)
            .transform(
                    "Custom Operator", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(false))
            .sinkTo(new DiscardingSink<>());
    env.execute();
    assertEquals(
            Watermark.MAX_WATERMARK,
            CustomOperator.finalWatermarks[0].get(
                    CustomOperator.finalWatermarks[0].size() - 1));
}
/**
 * Verifies that stop-with-savepoint does NOT emit a final MAX watermark: the job is stopped
 * while the (infinite) sources are still running, so the last observed watermark must be a
 * regular one.
 */
@Test
public void testWatermarkPropagationNoFinalWatermarkOnStop() throws Exception {
    // for this test to work, we need to be sure that no other jobs are being executed
    final ClusterClient<?> clusterClient = CLUSTER.getClusterClient();
    while (!getRunningJobs(clusterClient).isEmpty()) {
        Thread.sleep(100);
    }

    final int numWatermarks = 10;
    long initialTime = 0L;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);

    // infinite sources: they emit their watermarks and then idle until cancelled
    DataStream<Integer> source1 =
            env.addSource(new MyTimestampSourceInfinite(initialTime, numWatermarks));
    DataStream<Integer> source2 =
            env.addSource(new MyTimestampSourceInfinite(initialTime, numWatermarks / 2));

    source1.union(source2)
            .map(new IdentityMap())
            .connect(source2)
            .map(new IdentityCoMap())
            .transform("Custom Operator", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
            .sinkTo(new DiscardingSink<Integer>());

    // background thread that stops the job with a savepoint once it is fully running
    Thread t =
            new Thread("stopper") {
                @Override
                public void run() {
                    try {
                        // try until we get the running jobs
                        List<JobID> running = getRunningJobs(clusterClient);
                        while (running.isEmpty()) {
                            Thread.sleep(10);
                            running = getRunningJobs(clusterClient);
                        }

                        JobID id = running.get(0);
                        waitUntilAllTasksAreRunning(CLUSTER.getRestClusterClient(), id);

                        // send stop until the job is stopped
                        final String savepointDirName = tmpFolder.newFolder().getAbsolutePath();
                        do {
                            try {
                                clusterClient
                                        .stopWithSavepoint(
                                                id,
                                                false,
                                                savepointDirName,
                                                SavepointFormatType.CANONICAL)
                                        .get();
                            } catch (Exception e) {
                                // NOT_ALL_REQUIRED_TASKS_RUNNING is transient: retry;
                                // any other failure is rethrown
                                boolean ignoreException =
                                        ExceptionUtils.findThrowable(
                                                        e, CheckpointException.class)
                                                .map(
                                                        CheckpointException
                                                                ::getCheckpointFailureReason)
                                                .map(
                                                        reason ->
                                                                reason
                                                                        == CheckpointFailureReason
                                                                                .NOT_ALL_REQUIRED_TASKS_RUNNING)
                                                .orElse(false);
                                if (!ignoreException) {
                                    throw e;
                                }
                            }
                            Thread.sleep(10);
                        } while (!getRunningJobs(clusterClient).isEmpty());
                    } catch (Throwable t) {
                        t.printStackTrace();
                    }
                }
            };
    t.start();
    env.execute();

    // verify that all the watermarks arrived at the final custom operator
    for (List<Watermark> subtaskWatermarks : CustomOperator.finalWatermarks) {
        // we are only guaranteed to see NUM_WATERMARKS / 2 watermarks because the
        // other source stops emitting after that
        for (int j = 0; j < subtaskWatermarks.size(); j++) {
            if (subtaskWatermarks.get(j).getTimestamp() != initialTime + j) {
                System.err.println("All Watermarks: ");
                for (int k = 0; k <= numWatermarks / 2; k++) {
                    System.err.println(subtaskWatermarks.get(k));
                }
                fail("Wrong watermark.");
            }
        }
        // if there are watermarks, the final one must not be the MAX watermark
        if (subtaskWatermarks.size() > 0) {
            assertNotEquals(
                    Watermark.MAX_WATERMARK,
                    subtaskWatermarks.get(subtaskWatermarks.size() - 1));
        }
    }

    t.join();
}
/**
 * These check whether timestamps are properly assigned at the sources and handled in network
 * transmission and between chained operators when timestamps are enabled.
 */
@Test
public void testTimestampHandling() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);

    DataStream<Integer> source1 = env.addSource(new MyTimestampSource(0L, numElements));
    DataStream<Integer> source2 = env.addSource(new MyTimestampSource(0L, numElements));

    // the checking operator fails the job if any element's timestamp != its value
    source1.map(new IdentityMap())
            .connect(source2)
            .map(new IdentityCoMap())
            .transform(
                    "Custom Operator",
                    BasicTypeInfo.INT_TYPE_INFO,
                    new TimestampCheckingOperator())
            .sinkTo(new DiscardingSink<Integer>());

    env.execute();
}
/**
 * Verifies that we don't have timestamps when the source doesn't emit them with the records.
 */
@Test
public void testDisabledTimestamps() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);

    // sources emit plain records via collect(), so no record carries a timestamp
    DataStream<Integer> source1 = env.addSource(new MyNonWatermarkingSource(numElements));
    DataStream<Integer> source2 = env.addSource(new MyNonWatermarkingSource(numElements));

    source1.map(new IdentityMap())
            .connect(source2)
            .map(new IdentityCoMap())
            .transform(
                    "Custom Operator",
                    BasicTypeInfo.INT_TYPE_INFO,
                    new DisabledTimestampCheckingOperator())
            .sinkTo(new DiscardingSink<Integer>());

    env.execute();
}
/**
 * This tests whether timestamps are properly extracted in the timestamp extractor and whether
 * watermarks are also correctly forwarded from this with the auto watermark interval.
 */
@Test
public void testTimestampExtractorWithAutoInterval() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setAutoWatermarkInterval(10);
    env.setParallelism(1);

    // emits 1..numElements; waits on the shared latch after each element so the
    // periodic watermark generator can fire in between
    DataStream<Integer> source1 =
            env.addSource(
                    new SourceFunction<Integer>() {
                        @Override
                        public void run(SourceContext<Integer> ctx) throws Exception {
                            int index = 1;
                            while (index <= numElements) {
                                ctx.collect(index);
                                latch.await();
                                index++;
                            }
                        }

                        @Override
                        public void cancel() {}
                    });

    // the element value itself serves as its event timestamp
    DataStream<Integer> extractOp =
            source1.assignTimestampsAndWatermarks(
                    AscendingRecordTimestampsWatermarkStrategy.create(Long::valueOf));

    extractOp
            .transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
            .transform(
                    "Timestamp Check",
                    BasicTypeInfo.INT_TYPE_INFO,
                    new TimestampCheckingOperator());

    // verify that extractor picks up source parallelism
    Assert.assertEquals(
            extractOp.getTransformation().getParallelism(),
            source1.getTransformation().getParallelism());

    env.execute();

    // verify that we get NUM_ELEMENTS watermarks
    for (int j = 0; j < numElements; j++) {
        if (!CustomOperator.finalWatermarks[0].get(j).equals(new Watermark(j))) {
            long wm = CustomOperator.finalWatermarks[0].get(j).getTimestamp();
            Assert.fail(
                    "Wrong watermark. Expected: "
                            + j
                            + " Found: "
                            + wm
                            + " All: "
                            + CustomOperator.finalWatermarks[0]);
        }
    }

    // the input is finite, so it should have a MAX Watermark
    assertEquals(
            Watermark.MAX_WATERMARK,
            CustomOperator.finalWatermarks[0].get(
                    CustomOperator.finalWatermarks[0].size() - 1));
}
/**
 * This tests whether timestamps are properly extracted in the timestamp extractor and whether
 * watermark are correctly forwarded from the custom watermark emit function.
 */
@Test
public void testTimestampExtractorWithCustomWatermarkEmit() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setAutoWatermarkInterval(10);
    env.setParallelism(1);

    // emits 1..numElements; the latch gates progress so every element's punctuated
    // watermark is observed downstream before the next element is emitted
    DataStream<Integer> source1 =
            env.addSource(
                    new SourceFunction<Integer>() {
                        @Override
                        public void run(SourceContext<Integer> ctx) throws Exception {
                            int index = 1;
                            while (index <= numElements) {
                                ctx.collect(index);
                                latch.await();
                                index++;
                            }
                        }

                        @Override
                        public void cancel() {}
                    });

    // strategy: timestamp = element value; punctuated watermark = timestamp - 1
    source1.assignTimestampsAndWatermarks(
                    new WatermarkStrategy<Integer>() {
                        @Override
                        public TimestampAssigner<Integer> createTimestampAssigner(
                                TimestampAssignerSupplier.Context context) {
                            return (element, recordTimestamp) -> element;
                        }

                        @Override
                        public WatermarkGenerator<Integer> createWatermarkGenerator(
                                WatermarkGeneratorSupplier.Context context) {
                            return new WatermarkGenerator<Integer>() {
                                @Override
                                public void onEvent(
                                        Integer event,
                                        long eventTimestamp,
                                        WatermarkOutput output) {
                                    output.emitWatermark(
                                            new org.apache.flink.api.common.eventtime.Watermark(
                                                    eventTimestamp - 1));
                                }

                                @Override
                                public void onPeriodicEmit(WatermarkOutput output) {}
                            };
                        }
                    })
            .transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
            .transform(
                    "Timestamp Check",
                    BasicTypeInfo.INT_TYPE_INFO,
                    new TimestampCheckingOperator());

    env.execute();

    // verify that we get NUM_ELEMENTS watermarks
    for (int j = 0; j < numElements; j++) {
        if (!CustomOperator.finalWatermarks[0].get(j).equals(new Watermark(j))) {
            Assert.fail("Wrong watermark.");
        }
    }

    // the input is finite, so it should have a MAX Watermark
    assertEquals(
            Watermark.MAX_WATERMARK,
            CustomOperator.finalWatermarks[0].get(
                    CustomOperator.finalWatermarks[0].size() - 1));
}
/** This test verifies that the timestamp extractor does not emit decreasing watermarks. */
@Test
public void testTimestampExtractorWithDecreasingCustomWatermarkEmit() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setAutoWatermarkInterval(1);
    env.setParallelism(1);

    // emits each element followed by its predecessor (a smaller value), which
    // would produce a decreasing watermark if the framework forwarded it
    DataStream<Integer> source1 =
            env.addSource(
                    new SourceFunction<Integer>() {
                        @Override
                        public void run(SourceContext<Integer> ctx) throws Exception {
                            int index = 1;
                            while (index <= numElements) {
                                ctx.collect(index);
                                Thread.sleep(100);
                                ctx.collect(index - 1);
                                latch.await();
                                index++;
                            }
                        }

                        @Override
                        public void cancel() {}
                    });

    // strategy: timestamp = element value; punctuated watermark = timestamp - 1
    source1.assignTimestampsAndWatermarks(
                    new WatermarkStrategy<Integer>() {
                        @Override
                        public TimestampAssigner<Integer> createTimestampAssigner(
                                TimestampAssignerSupplier.Context context) {
                            return (element, recordTimestamp) -> element;
                        }

                        @Override
                        public WatermarkGenerator<Integer> createWatermarkGenerator(
                                WatermarkGeneratorSupplier.Context context) {
                            return new WatermarkGenerator<Integer>() {
                                @Override
                                public void onEvent(
                                        Integer event,
                                        long eventTimestamp,
                                        WatermarkOutput output) {
                                    output.emitWatermark(
                                            new org.apache.flink.api.common.eventtime.Watermark(
                                                    eventTimestamp - 1));
                                }

                                @Override
                                public void onPeriodicEmit(WatermarkOutput output) {}
                            };
                        }
                    })
            .transform("Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true))
            .transform(
                    "Timestamp Check",
                    BasicTypeInfo.INT_TYPE_INFO,
                    new TimestampCheckingOperator());

    env.execute();

    // verify that we get NUM_ELEMENTS strictly ascending watermarks
    for (int j = 0; j < numElements; j++) {
        if (!CustomOperator.finalWatermarks[0].get(j).equals(new Watermark(j))) {
            Assert.fail("Wrong watermark.");
        }
    }

    // the input is finite, so it should have a MAX Watermark
    assertEquals(
            Watermark.MAX_WATERMARK,
            CustomOperator.finalWatermarks[0].get(
                    CustomOperator.finalWatermarks[0].size() - 1));
}
/** This test verifies that the timestamp extractor forwards Long.MAX_VALUE watermarks. */
@Test
public void testTimestampExtractorWithLongMaxWatermarkFromSource() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setAutoWatermarkInterval(1);
    env.setParallelism(2);

    DataStream<Integer> source1 =
            env.addSource(
                    new SourceFunction<Integer>() {
                        @Override
                        public void run(SourceContext<Integer> ctx) throws Exception {
                            int index = 1;
                            while (index <= numElements) {
                                ctx.collectWithTimestamp(index, index);
                                ctx.collectWithTimestamp(index - 1, index - 1);
                                index++;
                                ctx.emitWatermark(new Watermark(index - 2));
                            }

                            // emit the final Long.MAX_VALUE watermark, do it twice and verify
                            // that we only see one in the result
                            ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
                            ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
                        }

                        @Override
                        public void cancel() {}
                    });

    // the NoWatermarksGenerator drops the source's regular watermarks; only the
    // terminal MAX watermark is forwarded by the framework
    source1.assignTimestampsAndWatermarks(
                    (WatermarkStrategy<Integer>) context -> new NoWatermarksGenerator<>())
            .transform(
                    "Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true));

    env.execute();

    // use assertEquals (not assertTrue(a == b)) so a failure reports both values
    Assert.assertEquals(1, CustomOperator.finalWatermarks[0].size());
    Assert.assertEquals(
            Long.MAX_VALUE, CustomOperator.finalWatermarks[0].get(0).getTimestamp());
}
/**
 * This test verifies that the timestamp extractor forwards Long.MAX_VALUE watermarks.
 *
 * <p>Same test as before, but using a different timestamp extractor.
 */
@Test
public void testTimestampExtractorWithLongMaxWatermarkFromSource2() throws Exception {
    final int numElements = 10;

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().setAutoWatermarkInterval(10);
    env.setParallelism(2);

    DataStream<Integer> source1 =
            env.addSource(
                    new SourceFunction<Integer>() {
                        @Override
                        public void run(SourceContext<Integer> ctx) throws Exception {
                            int index = 1;
                            while (index <= numElements) {
                                ctx.collectWithTimestamp(index, index);
                                ctx.collectWithTimestamp(index - 1, index - 1);
                                index++;
                                ctx.emitWatermark(new Watermark(index - 2));
                            }

                            // emit the final Long.MAX_VALUE watermark, do it twice and verify
                            // that we only see one in the result
                            ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
                            ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
                        }

                        @Override
                        public void cancel() {}
                    });

    // the NoWatermarksGenerator drops the source's regular watermarks; only the
    // terminal MAX watermark is forwarded by the framework
    source1.assignTimestampsAndWatermarks(
                    (WatermarkStrategy<Integer>) context -> new NoWatermarksGenerator<>())
            .transform(
                    "Watermark Check", BasicTypeInfo.INT_TYPE_INFO, new CustomOperator(true));

    env.execute();

    // use assertEquals (not assertTrue(a == b)) so a failure reports both values
    Assert.assertEquals(1, CustomOperator.finalWatermarks[0].size());
    Assert.assertEquals(
            Long.MAX_VALUE, CustomOperator.finalWatermarks[0].get(0).getTimestamp());
}
/** Expects the job to fail: event-time windows are used but no timestamps are assigned. */
@Test
public void testErrorOnEventTimeOverProcessingTime() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(2);

    DataStream<Tuple2<String, Integer>> source1 =
            env.fromData(new Tuple2<>("a", 1), new Tuple2<>("b", 2));

    source1.keyBy(x -> x.f0)
            .window(TumblingEventTimeWindows.of(Duration.ofSeconds(5)))
            .reduce(
                    new ReduceFunction<Tuple2<String, Integer>>() {
                        @Override
                        public Tuple2<String, Integer> reduce(
                                Tuple2<String, Integer> value1,
                                Tuple2<String, Integer> value2) {
                            return value1;
                        }
                    })
            .print();

    try {
        env.execute();
        fail("this should fail with an exception");
    } catch (Exception e) {
        // expected
    }
}
/** Expects the job to fail: the input records carry no timestamps for event-time windows. */
@Test
public void testErrorOnEventTimeWithoutTimestamps() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(2);

    DataStream<Tuple2<String, Integer>> source1 =
            env.fromData(new Tuple2<>("a", 1), new Tuple2<>("b", 2));

    source1.keyBy(x -> x.f0)
            .window(TumblingEventTimeWindows.of(Duration.ofSeconds(5)))
            .reduce(
                    new ReduceFunction<Tuple2<String, Integer>>() {
                        @Override
                        public Tuple2<String, Integer> reduce(
                                Tuple2<String, Integer> value1,
                                Tuple2<String, Integer> value2) {
                            return value1;
                        }
                    })
            .print();

    try {
        env.execute();
        fail("this should fail with an exception");
    } catch (Exception e) {
        // expected
    }
}
// ------------------------------------------------------------------------
// Custom Operators and Functions
// ------------------------------------------------------------------------
/**
 * Operator that records every watermark it receives and publishes the per-subtask lists
 * through the static {@code finalWatermarks} array on close() for the tests to inspect.
 */
@SuppressWarnings("unchecked")
private static class CustomOperator extends AbstractStreamOperator<Integer>
        implements OneInputStreamOperator<Integer, Integer> {

    // watermarks seen by this subtask, in arrival order
    List<Watermark> watermarks;
    // per-subtask results, indexed by subtask index; written in close()
    public static List<Watermark>[] finalWatermarks = new List[PARALLELISM];
    private final boolean timestampsEnabled;

    public CustomOperator(boolean timestampsEnabled) {
        this.timestampsEnabled = timestampsEnabled;
    }

    @Override
    public void processElement(StreamRecord<Integer> element) throws Exception {
        if (timestampsEnabled) {
            // in these tests, every element's timestamp equals its integer value
            if (element.getTimestamp() != element.getValue()) {
                Assert.fail("Timestamps are not properly handled.");
            }
        }
        output.collect(element);
    }

    @Override
    public void processWatermark(Watermark mark) throws Exception {
        super.processWatermark(mark);
        // watermarks must arrive in strictly ascending order
        for (Watermark previousMark : watermarks) {
            assertTrue(previousMark.getTimestamp() < mark.getTimestamp());
        }
        watermarks.add(mark);
        // unblock sources waiting on the shared latch before emitting more data
        latch.trigger();
        output.emitWatermark(mark);
    }

    @Override
    public void open() throws Exception {
        super.open();
        watermarks = new ArrayList<>();
    }

    @Override
    public void close() throws Exception {
        super.close();
        finalWatermarks[getRuntimeContext().getTaskInfo().getIndexOfThisSubtask()] = watermarks;
    }
}
/** Fails the job if an element's event timestamp differs from its integer value. */
private static class TimestampCheckingOperator extends AbstractStreamOperator<Integer>
        implements OneInputStreamOperator<Integer, Integer> {

    public TimestampCheckingOperator() {}

    @Override
    public void processElement(StreamRecord<Integer> element) throws Exception {
        // the test sources always stamp each element with its own value
        final long expectedTimestamp = element.getValue();
        if (element.getTimestamp() != expectedTimestamp) {
            Assert.fail("Timestamps are not properly handled.");
        }
        output.collect(element);
    }
}
/** Fails the job if any record carries a timestamp at all. */
private static class DisabledTimestampCheckingOperator extends AbstractStreamOperator<Integer>
        implements OneInputStreamOperator<Integer, Integer> {

    @Override
    public void processElement(StreamRecord<Integer> element) throws Exception {
        // with timestamps disabled at the source, no record may carry one
        final boolean carriesTimestamp = element.hasTimestamp();
        if (carriesTimestamp) {
            Assert.fail("Timestamps are not properly handled.");
        }
        output.collect(element);
    }
}
/** Pass-through co-map: forwards elements from both inputs unchanged. */
private static class IdentityCoMap implements CoMapFunction<Integer, Integer, Integer> {

    @Override
    public Integer map1(Integer input) throws Exception {
        return input;
    }

    @Override
    public Integer map2(Integer input) throws Exception {
        return input;
    }
}
/** Pass-through map: returns its input unchanged. */
private static class IdentityMap implements MapFunction<Integer, Integer> {

    @Override
    public Integer map(Integer input) throws Exception {
        return input;
    }
}
/** Bounded source emitting one timestamped record plus a matching watermark per step. */
private static class MyTimestampSource implements SourceFunction<Integer> {

    private final long startTime;
    private final int watermarkCount;

    public MyTimestampSource(long initialTime, int numWatermarks) {
        this.startTime = initialTime;
        this.watermarkCount = numWatermarks;
    }

    @Override
    public void run(SourceContext<Integer> ctx) throws Exception {
        int element = 0;
        while (element < watermarkCount) {
            final long eventTime = startTime + element;
            ctx.collectWithTimestamp(element, eventTime);
            ctx.emitWatermark(new Watermark(eventTime));
            element++;
        }
    }

    @Override
    public void cancel() {}
}
/**
 * Source that emits its timestamped records and watermarks, then idles forever until
 * cancelled — used by the stop-with-savepoint test so the job never finishes on its own.
 */
private static class MyTimestampSourceInfinite implements SourceFunction<Integer> {

    private final long startTime;
    private final int watermarkCount;
    private volatile boolean running = true;

    public MyTimestampSourceInfinite(long initialTime, int numWatermarks) {
        this.startTime = initialTime;
        this.watermarkCount = numWatermarks;
    }

    @Override
    public void run(SourceContext<Integer> ctx) throws Exception {
        int element = 0;
        while (element < watermarkCount) {
            final long eventTime = startTime + element;
            ctx.collectWithTimestamp(element, eventTime);
            ctx.emitWatermark(new Watermark(eventTime));
            element++;
        }
        // idle until cancel() flips the flag
        while (running) {
            Thread.sleep(20);
        }
    }

    @Override
    public void cancel() {
        running = false;
    }
}
/** Bounded source that emits plain records without timestamps or watermarks. */
private static class MyNonWatermarkingSource implements SourceFunction<Integer> {

    // made private final: the element count is assigned once in the constructor
    // and never changes (it was previously a mutable package-default field)
    private final int numWatermarks;

    public MyNonWatermarkingSource(int numWatermarks) {
        this.numWatermarks = numWatermarks;
    }

    @Override
    public void run(SourceContext<Integer> ctx) throws Exception {
        // use collect() (not collectWithTimestamp) so records carry no timestamp
        for (int i = 0; i < numWatermarks; i++) {
            ctx.collect(i);
        }
    }

    @Override
    public void cancel() {}
}
/** Returns the IDs of all jobs currently in RUNNING state on the given cluster. */
private static List<JobID> getRunningJobs(ClusterClient<?> client) throws Exception {
    final List<JobID> runningIds = new ArrayList<>();
    for (JobStatusMessage message : client.listJobs().get()) {
        if (message.getJobState() == JobStatus.RUNNING) {
            runningIds.add(message.getJobId());
        }
    }
    return runningIds;
}
/**
 * WatermarkStrategy that derives each record's timestamp via the supplied function and
 * generates ascending-timestamps watermarks.
 */
public static class AscendingRecordTimestampsWatermarkStrategy<T>
        implements WatermarkStrategy<T> {

    /** Extracts the (ascending) event timestamp from a record. */
    private final SerializableFunction<T, Long> extractor;

    public AscendingRecordTimestampsWatermarkStrategy(
            SerializableFunction<T, Long> timestampExtractor) {
        this.extractor = timestampExtractor;
    }

    /** Factory shorthand for the constructor. */
    public static <T> AscendingRecordTimestampsWatermarkStrategy<T> create(
            SerializableFunction<T, Long> timestampExtractor) {
        return new AscendingRecordTimestampsWatermarkStrategy<>(timestampExtractor);
    }

    @Override
    public TimestampAssigner<T> createTimestampAssigner(
            TimestampAssignerSupplier.Context context) {
        return (record, previousTimestamp) -> extractor.apply(record);
    }

    @Override
    public WatermarkGenerator<T> createWatermarkGenerator(
            WatermarkGeneratorSupplier.Context context) {
        return new AscendingTimestampsWatermarks<>();
    }
}
}
|
googleads/google-ads-java | 38,349 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/ListPlannableUserListsRequest.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/services/reach_plan_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* Request message for
* [ReachPlanService.ListPlannableUserLists][google.ads.googleads.v21.services.ReachPlanService.ListPlannableUserLists]
* that lists the available user lists for a customer.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.ListPlannableUserListsRequest}
*/
public final class ListPlannableUserListsRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.ListPlannableUserListsRequest)
ListPlannableUserListsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListPlannableUserListsRequest.newBuilder() to construct.
private ListPlannableUserListsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default constructor: initializes string fields to their empty-string defaults.
private ListPlannableUserListsRequest() {
  customerId_ = "";
  customerReachGroup_ = "";
}
// Codegen hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new ListPlannableUserListsRequest();
}
// Message descriptor, resolved from the generated ReachPlanServiceProto holder class.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_ListPlannableUserListsRequest_descriptor;
}

// Binds the descriptor's fields to this class's accessors for reflection support.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_ListPlannableUserListsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.ads.googleads.v21.services.ListPlannableUserListsRequest.class, com.google.ads.googleads.v21.services.ListPlannableUserListsRequest.Builder.class);
}
// Bit set tracking presence of the optional fields (customer_reach_group,
// reach_application_info).
private int bitField0_;
public static final int CUSTOMER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object customerId_ = "";
/**
 * <pre>
 * Required. The ID of the customer.
 * </pre>
 *
 * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 * @return The customerId.
 */
@java.lang.Override
public java.lang.String getCustomerId() {
  java.lang.Object ref = customerId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // field was stored as ByteString after parsing; decode and cache as String
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    customerId_ = s;
    return s;
  }
}
/**
 * <pre>
 * Required. The ID of the customer.
 * </pre>
 *
 * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 * @return The bytes for customerId.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getCustomerIdBytes() {
  java.lang.Object ref = customerId_;
  if (ref instanceof java.lang.String) {
    // encode and cache the UTF-8 bytes form
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    customerId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int CUSTOMER_REACH_GROUP_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object customerReachGroup_ = "";
/**
 * <pre>
 * The name of the customer being planned for. This is a user-defined value.
 * </pre>
 *
 * <code>optional string customer_reach_group = 2;</code>
 * @return Whether the customerReachGroup field is set.
 */
@java.lang.Override
public boolean hasCustomerReachGroup() {
  // presence of this optional field is tracked by bit 0 of bitField0_
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * <pre>
 * The name of the customer being planned for. This is a user-defined value.
 * </pre>
 *
 * <code>optional string customer_reach_group = 2;</code>
 * @return The customerReachGroup.
 */
@java.lang.Override
public java.lang.String getCustomerReachGroup() {
  java.lang.Object ref = customerReachGroup_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // decode cached ByteString to String and memoize it
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    customerReachGroup_ = s;
    return s;
  }
}
/**
 * <pre>
 * The name of the customer being planned for. This is a user-defined value.
 * </pre>
 *
 * <code>optional string customer_reach_group = 2;</code>
 * @return The bytes for customerReachGroup.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getCustomerReachGroupBytes() {
  java.lang.Object ref = customerReachGroup_;
  if (ref instanceof java.lang.String) {
    // encode and cache the UTF-8 bytes form
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    customerReachGroup_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int REACH_APPLICATION_INFO_FIELD_NUMBER = 3;
private com.google.ads.googleads.v21.common.AdditionalApplicationInfo reachApplicationInfo_;
/**
 * <pre>
 * Optional. Additional information on the application issuing the request.
 * </pre>
 *
 * <code>.google.ads.googleads.v21.common.AdditionalApplicationInfo reach_application_info = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 * @return Whether the reachApplicationInfo field is set.
 */
@java.lang.Override
public boolean hasReachApplicationInfo() {
  // presence of this message field is tracked by bit 1 of bitField0_
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * <pre>
 * Optional. Additional information on the application issuing the request.
 * </pre>
 *
 * <code>.google.ads.googleads.v21.common.AdditionalApplicationInfo reach_application_info = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 * @return The reachApplicationInfo.
 */
@java.lang.Override
public com.google.ads.googleads.v21.common.AdditionalApplicationInfo getReachApplicationInfo() {
  // returns the default instance rather than null when the field is unset
  return reachApplicationInfo_ == null ? com.google.ads.googleads.v21.common.AdditionalApplicationInfo.getDefaultInstance() : reachApplicationInfo_;
}
/**
 * <pre>
 * Optional. Additional information on the application issuing the request.
 * </pre>
 *
 * <code>.google.ads.googleads.v21.common.AdditionalApplicationInfo reach_application_info = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 */
@java.lang.Override
public com.google.ads.googleads.v21.common.AdditionalApplicationInfoOrBuilder getReachApplicationInfoOrBuilder() {
  return reachApplicationInfo_ == null ? com.google.ads.googleads.v21.common.AdditionalApplicationInfo.getDefaultInstance() : reachApplicationInfo_;
}
// Memoized initialization check: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  // no required proto2-style fields: always initialized
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set fields in field-number order, then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, customerId_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, customerReachGroup_);
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(3, getReachApplicationInfo());
  }
  getUnknownFields().writeTo(output);
}

// Computes (and memoizes) the serialized byte size; mirrors writeTo exactly.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(customerId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, customerId_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, customerReachGroup_);
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(3, getReachApplicationInfo());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-by-field equality; optional fields compare presence before value.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
   return true;
  }
  if (!(obj instanceof com.google.ads.googleads.v21.services.ListPlannableUserListsRequest)) {
    return super.equals(obj);
  }
  com.google.ads.googleads.v21.services.ListPlannableUserListsRequest other = (com.google.ads.googleads.v21.services.ListPlannableUserListsRequest) obj;

  if (!getCustomerId()
      .equals(other.getCustomerId())) return false;
  if (hasCustomerReachGroup() != other.hasCustomerReachGroup()) return false;
  if (hasCustomerReachGroup()) {
    if (!getCustomerReachGroup()
        .equals(other.getCustomerReachGroup())) return false;
  }
  if (hasReachApplicationInfo() != other.hasReachApplicationInfo()) return false;
  if (hasReachApplicationInfo()) {
    if (!getReachApplicationInfo()
        .equals(other.getReachApplicationInfo())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Memoized hash over the descriptor and all set fields, consistent with equals().
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + CUSTOMER_ID_FIELD_NUMBER;
  hash = (53 * hash) + getCustomerId().hashCode();
  if (hasCustomerReachGroup()) {
    hash = (37 * hash) + CUSTOMER_REACH_GROUP_FIELD_NUMBER;
    hash = (53 * hash) + getCustomerReachGroup().hashCode();
  }
  if (hasReachApplicationInfo()) {
    hash = (37 * hash) + REACH_APPLICATION_INFO_FIELD_NUMBER;
    hash = (53 * hash) + getReachApplicationInfo().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// -------------------------------------------------------------------------
// Standard protobuf parse entry points over the various input representations;
// all delegate to PARSER / GeneratedMessageV3 helpers.
// -------------------------------------------------------------------------
public static com.google.ads.googleads.v21.services.ListPlannableUserListsRequest parseFrom(
    java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListPlannableUserListsRequest parseFrom(
    java.nio.ByteBuffer data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListPlannableUserListsRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListPlannableUserListsRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListPlannableUserListsRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListPlannableUserListsRequest parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListPlannableUserListsRequest parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListPlannableUserListsRequest parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message from the stream.
public static com.google.ads.googleads.v21.services.ListPlannableUserListsRequest parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListPlannableUserListsRequest parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListPlannableUserListsRequest parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListPlannableUserListsRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods (standard generated-message surface).
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.services.ListPlannableUserListsRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // the default instance yields a fresh builder; otherwise seed it with this message
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
  /**
   * <pre>
   * Request message for
   * [ReachPlanService.ListPlannableUserLists][google.ads.googleads.v21.services.ReachPlanService.ListPlannableUserLists]
   * that lists the available user lists for a customer.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v21.services.ListPlannableUserListsRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.ListPlannableUserListsRequest)
      com.google.ads.googleads.v21.services.ListPlannableUserListsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_ListPlannableUserListsRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_ListPlannableUserListsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v21.services.ListPlannableUserListsRequest.class, com.google.ads.googleads.v21.services.ListPlannableUserListsRequest.Builder.class);
    }
    // Construct using com.google.ads.googleads.v21.services.ListPlannableUserListsRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates the nested-message field builder when the protobuf
    // runtime requests it (alwaysUseFieldBuilders is normally false).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getReachApplicationInfoFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset all has-bits and restore each field to its proto default.
      bitField0_ = 0;
      customerId_ = "";
      customerReachGroup_ = "";
      reachApplicationInfo_ = null;
      if (reachApplicationInfoBuilder_ != null) {
        reachApplicationInfoBuilder_.dispose();
        reachApplicationInfoBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v21_services_ListPlannableUserListsRequest_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.services.ListPlannableUserListsRequest getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.services.ListPlannableUserListsRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.services.ListPlannableUserListsRequest build() {
      com.google.ads.googleads.v21.services.ListPlannableUserListsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.services.ListPlannableUserListsRequest buildPartial() {
      com.google.ads.googleads.v21.services.ListPlannableUserListsRequest result = new com.google.ads.googleads.v21.services.ListPlannableUserListsRequest(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Copies only the fields whose builder has-bits are set into {@code result},
    // translating builder bits (0x1 customer_id, 0x2 customer_reach_group,
    // 0x4 reach_application_info) into the message's own presence bits for the
    // two optional fields.
    private void buildPartial0(com.google.ads.googleads.v21.services.ListPlannableUserListsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.customerId_ = customerId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.customerReachGroup_ = customerReachGroup_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.reachApplicationInfo_ = reachApplicationInfoBuilder_ == null
            ? reachApplicationInfo_
            : reachApplicationInfoBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Dispatch to the typed merge when possible; fall back to the generic
      // descriptor-based merge otherwise.
      if (other instanceof com.google.ads.googleads.v21.services.ListPlannableUserListsRequest) {
        return mergeFrom((com.google.ads.googleads.v21.services.ListPlannableUserListsRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: set/non-empty fields of {@code other} overwrite or
    // merge into this builder; unset fields of {@code other} are ignored.
    public Builder mergeFrom(com.google.ads.googleads.v21.services.ListPlannableUserListsRequest other) {
      if (other == com.google.ads.googleads.v21.services.ListPlannableUserListsRequest.getDefaultInstance()) return this;
      if (!other.getCustomerId().isEmpty()) {
        customerId_ = other.customerId_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasCustomerReachGroup()) {
        customerReachGroup_ = other.customerReachGroup_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasReachApplicationInfo()) {
        mergeReachApplicationInfo(other.getReachApplicationInfo());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields in this message, so any state is initialized.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      // Wire-format parse loop: each case value is (field_number << 3) | wire_type,
      // e.g. 10 = field 1 length-delimited, 18 = field 2, 26 = field 3.
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              customerId_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 18: {
              customerReachGroup_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
            case 26: {
              input.readMessage(
                  getReachApplicationInfoFieldBuilder().getBuilder(),
                  extensionRegistry);
              bitField0_ |= 0x00000004;
              break;
            } // case 26
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Has-bits for the three fields; see buildPartial0 for the bit layout.
    private int bitField0_;
    // Stored as String or ByteString; decoded lazily in the getter.
    private java.lang.Object customerId_ = "";
    /**
     * <pre>
     * Required. The ID of the customer.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The customerId.
     */
    public java.lang.String getCustomerId() {
      java.lang.Object ref = customerId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so later calls skip the UTF-8 decode.
        customerId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The ID of the customer.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The bytes for customerId.
     */
    public com.google.protobuf.ByteString
        getCustomerIdBytes() {
      java.lang.Object ref = customerId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        customerId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The ID of the customer.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The customerId to set.
     * @return This builder for chaining.
     */
    public Builder setCustomerId(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      customerId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The ID of the customer.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return This builder for chaining.
     */
    public Builder clearCustomerId() {
      customerId_ = getDefaultInstance().getCustomerId();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The ID of the customer.
     * </pre>
     *
     * <code>string customer_id = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The bytes for customerId to set.
     * @return This builder for chaining.
     */
    public Builder setCustomerIdBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      customerId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object customerReachGroup_ = "";
    /**
     * <pre>
     * The name of the customer being planned for. This is a user-defined value.
     * </pre>
     *
     * <code>optional string customer_reach_group = 2;</code>
     * @return Whether the customerReachGroup field is set.
     */
    public boolean hasCustomerReachGroup() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <pre>
     * The name of the customer being planned for. This is a user-defined value.
     * </pre>
     *
     * <code>optional string customer_reach_group = 2;</code>
     * @return The customerReachGroup.
     */
    public java.lang.String getCustomerReachGroup() {
      java.lang.Object ref = customerReachGroup_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        customerReachGroup_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The name of the customer being planned for. This is a user-defined value.
     * </pre>
     *
     * <code>optional string customer_reach_group = 2;</code>
     * @return The bytes for customerReachGroup.
     */
    public com.google.protobuf.ByteString
        getCustomerReachGroupBytes() {
      java.lang.Object ref = customerReachGroup_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        customerReachGroup_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The name of the customer being planned for. This is a user-defined value.
     * </pre>
     *
     * <code>optional string customer_reach_group = 2;</code>
     * @param value The customerReachGroup to set.
     * @return This builder for chaining.
     */
    public Builder setCustomerReachGroup(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      customerReachGroup_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The name of the customer being planned for. This is a user-defined value.
     * </pre>
     *
     * <code>optional string customer_reach_group = 2;</code>
     * @return This builder for chaining.
     */
    public Builder clearCustomerReachGroup() {
      customerReachGroup_ = getDefaultInstance().getCustomerReachGroup();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The name of the customer being planned for. This is a user-defined value.
     * </pre>
     *
     * <code>optional string customer_reach_group = 2;</code>
     * @param value The bytes for customerReachGroup to set.
     * @return This builder for chaining.
     */
    public Builder setCustomerReachGroupBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      customerReachGroup_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private com.google.ads.googleads.v21.common.AdditionalApplicationInfo reachApplicationInfo_;
    // Lazily-created nested builder; when non-null it owns the field state.
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v21.common.AdditionalApplicationInfo, com.google.ads.googleads.v21.common.AdditionalApplicationInfo.Builder, com.google.ads.googleads.v21.common.AdditionalApplicationInfoOrBuilder> reachApplicationInfoBuilder_;
    /**
     * <pre>
     * Optional. Additional information on the application issuing the request.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdditionalApplicationInfo reach_application_info = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return Whether the reachApplicationInfo field is set.
     */
    public boolean hasReachApplicationInfo() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <pre>
     * Optional. Additional information on the application issuing the request.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdditionalApplicationInfo reach_application_info = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return The reachApplicationInfo.
     */
    public com.google.ads.googleads.v21.common.AdditionalApplicationInfo getReachApplicationInfo() {
      if (reachApplicationInfoBuilder_ == null) {
        return reachApplicationInfo_ == null ? com.google.ads.googleads.v21.common.AdditionalApplicationInfo.getDefaultInstance() : reachApplicationInfo_;
      } else {
        return reachApplicationInfoBuilder_.getMessage();
      }
    }
    /**
     * <pre>
     * Optional. Additional information on the application issuing the request.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdditionalApplicationInfo reach_application_info = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder setReachApplicationInfo(com.google.ads.googleads.v21.common.AdditionalApplicationInfo value) {
      if (reachApplicationInfoBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        reachApplicationInfo_ = value;
      } else {
        reachApplicationInfoBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Optional. Additional information on the application issuing the request.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdditionalApplicationInfo reach_application_info = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder setReachApplicationInfo(
        com.google.ads.googleads.v21.common.AdditionalApplicationInfo.Builder builderForValue) {
      if (reachApplicationInfoBuilder_ == null) {
        reachApplicationInfo_ = builderForValue.build();
      } else {
        reachApplicationInfoBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Optional. Additional information on the application issuing the request.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdditionalApplicationInfo reach_application_info = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder mergeReachApplicationInfo(com.google.ads.googleads.v21.common.AdditionalApplicationInfo value) {
      if (reachApplicationInfoBuilder_ == null) {
        // Merge into the existing message only when one is already set and
        // non-default; otherwise replace wholesale.
        if (((bitField0_ & 0x00000004) != 0) &&
          reachApplicationInfo_ != null &&
          reachApplicationInfo_ != com.google.ads.googleads.v21.common.AdditionalApplicationInfo.getDefaultInstance()) {
          getReachApplicationInfoBuilder().mergeFrom(value);
        } else {
          reachApplicationInfo_ = value;
        }
      } else {
        reachApplicationInfoBuilder_.mergeFrom(value);
      }
      if (reachApplicationInfo_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     * <pre>
     * Optional. Additional information on the application issuing the request.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdditionalApplicationInfo reach_application_info = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder clearReachApplicationInfo() {
      bitField0_ = (bitField0_ & ~0x00000004);
      reachApplicationInfo_ = null;
      if (reachApplicationInfoBuilder_ != null) {
        reachApplicationInfoBuilder_.dispose();
        reachApplicationInfoBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Optional. Additional information on the application issuing the request.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdditionalApplicationInfo reach_application_info = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public com.google.ads.googleads.v21.common.AdditionalApplicationInfo.Builder getReachApplicationInfoBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getReachApplicationInfoFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Optional. Additional information on the application issuing the request.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdditionalApplicationInfo reach_application_info = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public com.google.ads.googleads.v21.common.AdditionalApplicationInfoOrBuilder getReachApplicationInfoOrBuilder() {
      if (reachApplicationInfoBuilder_ != null) {
        return reachApplicationInfoBuilder_.getMessageOrBuilder();
      } else {
        return reachApplicationInfo_ == null ?
            com.google.ads.googleads.v21.common.AdditionalApplicationInfo.getDefaultInstance() : reachApplicationInfo_;
      }
    }
    /**
     * <pre>
     * Optional. Additional information on the application issuing the request.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdditionalApplicationInfo reach_application_info = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v21.common.AdditionalApplicationInfo, com.google.ads.googleads.v21.common.AdditionalApplicationInfo.Builder, com.google.ads.googleads.v21.common.AdditionalApplicationInfoOrBuilder>
        getReachApplicationInfoFieldBuilder() {
      if (reachApplicationInfoBuilder_ == null) {
        // Ownership of the field moves into the builder; the plain message
        // reference is cleared once the builder exists.
        reachApplicationInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.ads.googleads.v21.common.AdditionalApplicationInfo, com.google.ads.googleads.v21.common.AdditionalApplicationInfo.Builder, com.google.ads.googleads.v21.common.AdditionalApplicationInfoOrBuilder>(
                getReachApplicationInfo(),
                getParentForChildren(),
                isClean());
        reachApplicationInfo_ = null;
      }
      return reachApplicationInfoBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.ListPlannableUserListsRequest)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.ListPlannableUserListsRequest)
  // Singleton empty instance; also the prototype behind newBuilder().
  private static final com.google.ads.googleads.v21.services.ListPlannableUserListsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.ListPlannableUserListsRequest();
  }
  public static com.google.ads.googleads.v21.services.ListPlannableUserListsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser used by every parseFrom overload; wraps checked I/O and
  // uninitialized-message failures into InvalidProtocolBufferException,
  // attaching whatever was parsed so far as the unfinished message.
  private static final com.google.protobuf.Parser<ListPlannableUserListsRequest>
      PARSER = new com.google.protobuf.AbstractParser<ListPlannableUserListsRequest>() {
    @java.lang.Override
    public ListPlannableUserListsRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<ListPlannableUserListsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListPlannableUserListsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v21.services.ListPlannableUserListsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 38,177 | java-chat/proto-google-cloud-chat-v1/src/main/java/com/google/chat/v1/ListSpaceEventsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/chat/v1/space_event.proto
// Protobuf Java Version: 3.25.8
package com.google.chat.v1;
/**
*
*
* <pre>
* Response message for listing space events.
* </pre>
*
* Protobuf type {@code google.chat.v1.ListSpaceEventsResponse}
*/
public final class ListSpaceEventsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.chat.v1.ListSpaceEventsResponse)
ListSpaceEventsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListSpaceEventsResponse.newBuilder() to construct.
  private ListSpaceEventsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for the default instance; initializes the
  // repeated and string fields to their empty proto defaults.
  private ListSpaceEventsResponse() {
    spaceEvents_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListSpaceEventsResponse();
  }
  // Descriptor plumbing generated from google/chat/v1/space_event.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.chat.v1.SpaceEventProto
        .internal_static_google_chat_v1_ListSpaceEventsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.chat.v1.SpaceEventProto
        .internal_static_google_chat_v1_ListSpaceEventsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.chat.v1.ListSpaceEventsResponse.class,
            com.google.chat.v1.ListSpaceEventsResponse.Builder.class);
  }
  public static final int SPACE_EVENTS_FIELD_NUMBER = 1;
  // Immutable once built; list identity is shared with the builder's snapshot.
  @SuppressWarnings("serial")
  private java.util.List<com.google.chat.v1.SpaceEvent> spaceEvents_;
  /**
   *
   *
   * <pre>
   * Results are returned in chronological order (oldest event first).
   * Note: The `permissionSettings` field is not returned in the Space
   * object for list requests.
   * </pre>
   *
   * <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.chat.v1.SpaceEvent> getSpaceEventsList() {
    return spaceEvents_;
  }
  /**
   *
   *
   * <pre>
   * Results are returned in chronological order (oldest event first).
   * Note: The `permissionSettings` field is not returned in the Space
   * object for list requests.
   * </pre>
   *
   * <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.chat.v1.SpaceEventOrBuilder>
      getSpaceEventsOrBuilderList() {
    return spaceEvents_;
  }
  /**
   *
   *
   * <pre>
   * Results are returned in chronological order (oldest event first).
   * Note: The `permissionSettings` field is not returned in the Space
   * object for list requests.
   * </pre>
   *
   * <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
   */
  @java.lang.Override
  public int getSpaceEventsCount() {
    return spaceEvents_.size();
  }
  /**
   *
   *
   * <pre>
   * Results are returned in chronological order (oldest event first).
   * Note: The `permissionSettings` field is not returned in the Space
   * object for list requests.
   * </pre>
   *
   * <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
   */
  @java.lang.Override
  public com.google.chat.v1.SpaceEvent getSpaceEvents(int index) {
    return spaceEvents_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Results are returned in chronological order (oldest event first).
   * Note: The `permissionSettings` field is not returned in the Space
   * object for list requests.
   * </pre>
   *
   * <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
   */
  @java.lang.Override
  public com.google.chat.v1.SpaceEventOrBuilder getSpaceEventsOrBuilder(int index) {
    return spaceEvents_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Stored as String or ByteString; decoded lazily and cached in the getter.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * Continuation token used to fetch more events.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later calls skip the UTF-8 decode.
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Continuation token used to fetch more events.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized tri-state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize fields in tag order: repeated space_events (1), then
    // next_page_token (2) only when non-empty, then any unknown fields.
    for (int i = 0; i < spaceEvents_.size(); i++) {
      output.writeMessage(1, spaceEvents_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized; -1 means not yet computed. Must mirror writeTo exactly.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < spaceEvents_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, spaceEvents_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.chat.v1.ListSpaceEventsResponse)) {
      return super.equals(obj);
    }
    com.google.chat.v1.ListSpaceEventsResponse other =
        (com.google.chat.v1.ListSpaceEventsResponse) obj;
    // Field-by-field comparison, including unknown fields.
    if (!getSpaceEventsList().equals(other.getSpaceEventsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 means not yet computed (protoc's hash never yields 0 here).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getSpaceEventsCount() > 0) {
      hash = (37 * hash) + SPACE_EVENTS_FIELD_NUMBER;
      hash = (53 * hash) + getSpaceEventsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Generated parse entry points ---
  // Decode a serialized ListSpaceEventsResponse from various input kinds;
  // all delegate to PARSER or the GeneratedMessageV3 I/O helpers.
  public static com.google.chat.v1.ListSpaceEventsResponse parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.chat.v1.ListSpaceEventsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.chat.v1.ListSpaceEventsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.chat.v1.ListSpaceEventsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.chat.v1.ListSpaceEventsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.chat.v1.ListSpaceEventsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.chat.v1.ListSpaceEventsResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.chat.v1.ListSpaceEventsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants expect a varint length prefix before the message bytes.
  public static com.google.chat.v1.ListSpaceEventsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.chat.v1.ListSpaceEventsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.chat.v1.ListSpaceEventsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.chat.v1.ListSpaceEventsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Returns a fresh builder seeded from the shared default (empty) instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated with a copy of {@code prototype}'s fields.
  public static Builder newBuilder(com.google.chat.v1.ListSpaceEventsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the mergeFrom copy when this is the immutable default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for listing space events.
* </pre>
*
* Protobuf type {@code google.chat.v1.ListSpaceEventsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.chat.v1.ListSpaceEventsResponse)
com.google.chat.v1.ListSpaceEventsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.chat.v1.SpaceEventProto
.internal_static_google_chat_v1_ListSpaceEventsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.chat.v1.SpaceEventProto
.internal_static_google_chat_v1_ListSpaceEventsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.chat.v1.ListSpaceEventsResponse.class,
com.google.chat.v1.ListSpaceEventsResponse.Builder.class);
}
// Construct using com.google.chat.v1.ListSpaceEventsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    /**
     * Resets the builder to the default (empty) message state.
     *
     * <p>In {@code bitField0_}, bit {@code 0x1} tracks whether {@code spaceEvents_} is a
     * builder-owned mutable list; bit {@code 0x2} tracks presence of {@code nextPageToken_}.
     */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (spaceEventsBuilder_ == null) {
        // No nested field builder in use: drop back to the shared immutable empty list.
        spaceEvents_ = java.util.Collections.emptyList();
      } else {
        // A nested field builder owns the repeated field; clear through it instead.
        spaceEvents_ = null;
        spaceEventsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.chat.v1.SpaceEventProto
          .internal_static_google_chat_v1_ListSpaceEventsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.chat.v1.ListSpaceEventsResponse getDefaultInstanceForType() {
      return com.google.chat.v1.ListSpaceEventsResponse.getDefaultInstance();
    }
    /** Builds the message, throwing if it is not fully initialized (never happens for proto3). */
    @java.lang.Override
    public com.google.chat.v1.ListSpaceEventsResponse build() {
      com.google.chat.v1.ListSpaceEventsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    /** Builds the message without the initialization check (proto3 has no required fields). */
    @java.lang.Override
    public com.google.chat.v1.ListSpaceEventsResponse buildPartial() {
      com.google.chat.v1.ListSpaceEventsResponse result =
          new com.google.chat.v1.ListSpaceEventsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    /** Transfers the repeated space_events field into {@code result}, freezing it if builder-owned. */
    private void buildPartialRepeatedFields(com.google.chat.v1.ListSpaceEventsResponse result) {
      if (spaceEventsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          // The builder owns a mutable list: freeze it and clear the ownership bit so a later
          // mutation copies-on-write instead of aliasing the already-built message.
          spaceEvents_ = java.util.Collections.unmodifiableList(spaceEvents_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.spaceEvents_ = spaceEvents_;
      } else {
        result.spaceEvents_ = spaceEventsBuilder_.build();
      }
    }
    /** Copies scalar fields whose presence bits are set into {@code result}. */
    private void buildPartial0(com.google.chat.v1.ListSpaceEventsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // Boilerplate overrides that delegate straight to GeneratedMessageV3.Builder; emitted by
    // protoc so the generated API surface stays stable across protobuf runtime versions.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    /** Dynamic merge entry point: dispatches to the typed overload when the type matches. */
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.chat.v1.ListSpaceEventsResponse) {
        return mergeFrom((com.google.chat.v1.ListSpaceEventsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    /**
     * Merges all set fields of {@code other} into this builder: repeated space_events entries are
     * appended, and a non-empty next_page_token in {@code other} replaces the current value.
     */
    public Builder mergeFrom(com.google.chat.v1.ListSpaceEventsResponse other) {
      if (other == com.google.chat.v1.ListSpaceEventsResponse.getDefaultInstance()) return this;
      if (spaceEventsBuilder_ == null) {
        if (!other.spaceEvents_.isEmpty()) {
          if (spaceEvents_.isEmpty()) {
            // This builder is empty: share other's immutable list until a mutation forces a copy.
            spaceEvents_ = other.spaceEvents_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureSpaceEventsIsMutable();
            spaceEvents_.addAll(other.spaceEvents_);
          }
          onChanged();
        }
      } else {
        if (!other.spaceEvents_.isEmpty()) {
          if (spaceEventsBuilder_.isEmpty()) {
            // An empty field builder cannot adopt a foreign immutable list directly: dispose it,
            // take the list, then lazily recreate the builder if the runtime requires one.
            spaceEventsBuilder_.dispose();
            spaceEventsBuilder_ = null;
            spaceEvents_ = other.spaceEvents_;
            bitField0_ = (bitField0_ & ~0x00000001);
            spaceEventsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getSpaceEventsFieldBuilder()
                    : null;
          } else {
            spaceEventsBuilder_.addAllMessages(other.spaceEvents_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    /** Always true: proto3 messages have no required fields. */
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    /**
     * Parses wire-format fields from {@code input} directly into this builder.
     *
     * <p>Unknown fields are preserved; on a parse error the partially-read state is kept (via the
     * {@code finally} block's {@code onChanged()}) so callers can inspect the unfinished message.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 means end of input (or end of a group).
              done = true;
              break;
            case 10:
              { // Field 1, wire type 2: repeated SpaceEvent space_events.
                com.google.chat.v1.SpaceEvent m =
                    input.readMessage(com.google.chat.v1.SpaceEvent.parser(), extensionRegistry);
                if (spaceEventsBuilder_ == null) {
                  ensureSpaceEventsIsMutable();
                  spaceEvents_.add(m);
                } else {
                  spaceEventsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              { // Field 2, wire type 2: string next_page_token.
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence/ownership bits: 0x1 = spaceEvents_ is a builder-owned mutable list,
    // 0x2 = nextPageToken_ has been explicitly set.
    private int bitField0_;
    private java.util.List<com.google.chat.v1.SpaceEvent> spaceEvents_ =
        java.util.Collections.emptyList();
    /** Copy-on-write: replaces the (possibly shared, immutable) list with a private ArrayList. */
    private void ensureSpaceEventsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        spaceEvents_ = new java.util.ArrayList<com.google.chat.v1.SpaceEvent>(spaceEvents_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily-created nested builder for the repeated space_events field; while non-null it owns
    // the field's contents and spaceEvents_ above is ignored.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.chat.v1.SpaceEvent,
            com.google.chat.v1.SpaceEvent.Builder,
            com.google.chat.v1.SpaceEventOrBuilder>
        spaceEventsBuilder_;
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public java.util.List<com.google.chat.v1.SpaceEvent> getSpaceEventsList() {
if (spaceEventsBuilder_ == null) {
return java.util.Collections.unmodifiableList(spaceEvents_);
} else {
return spaceEventsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public int getSpaceEventsCount() {
if (spaceEventsBuilder_ == null) {
return spaceEvents_.size();
} else {
return spaceEventsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public com.google.chat.v1.SpaceEvent getSpaceEvents(int index) {
if (spaceEventsBuilder_ == null) {
return spaceEvents_.get(index);
} else {
return spaceEventsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public Builder setSpaceEvents(int index, com.google.chat.v1.SpaceEvent value) {
if (spaceEventsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSpaceEventsIsMutable();
spaceEvents_.set(index, value);
onChanged();
} else {
spaceEventsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public Builder setSpaceEvents(
int index, com.google.chat.v1.SpaceEvent.Builder builderForValue) {
if (spaceEventsBuilder_ == null) {
ensureSpaceEventsIsMutable();
spaceEvents_.set(index, builderForValue.build());
onChanged();
} else {
spaceEventsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public Builder addSpaceEvents(com.google.chat.v1.SpaceEvent value) {
if (spaceEventsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSpaceEventsIsMutable();
spaceEvents_.add(value);
onChanged();
} else {
spaceEventsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public Builder addSpaceEvents(int index, com.google.chat.v1.SpaceEvent value) {
if (spaceEventsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSpaceEventsIsMutable();
spaceEvents_.add(index, value);
onChanged();
} else {
spaceEventsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public Builder addSpaceEvents(com.google.chat.v1.SpaceEvent.Builder builderForValue) {
if (spaceEventsBuilder_ == null) {
ensureSpaceEventsIsMutable();
spaceEvents_.add(builderForValue.build());
onChanged();
} else {
spaceEventsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public Builder addSpaceEvents(
int index, com.google.chat.v1.SpaceEvent.Builder builderForValue) {
if (spaceEventsBuilder_ == null) {
ensureSpaceEventsIsMutable();
spaceEvents_.add(index, builderForValue.build());
onChanged();
} else {
spaceEventsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public Builder addAllSpaceEvents(
java.lang.Iterable<? extends com.google.chat.v1.SpaceEvent> values) {
if (spaceEventsBuilder_ == null) {
ensureSpaceEventsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, spaceEvents_);
onChanged();
} else {
spaceEventsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public Builder clearSpaceEvents() {
if (spaceEventsBuilder_ == null) {
spaceEvents_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
spaceEventsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public Builder removeSpaceEvents(int index) {
if (spaceEventsBuilder_ == null) {
ensureSpaceEventsIsMutable();
spaceEvents_.remove(index);
onChanged();
} else {
spaceEventsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public com.google.chat.v1.SpaceEvent.Builder getSpaceEventsBuilder(int index) {
return getSpaceEventsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public com.google.chat.v1.SpaceEventOrBuilder getSpaceEventsOrBuilder(int index) {
if (spaceEventsBuilder_ == null) {
return spaceEvents_.get(index);
} else {
return spaceEventsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public java.util.List<? extends com.google.chat.v1.SpaceEventOrBuilder>
getSpaceEventsOrBuilderList() {
if (spaceEventsBuilder_ != null) {
return spaceEventsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(spaceEvents_);
}
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public com.google.chat.v1.SpaceEvent.Builder addSpaceEventsBuilder() {
return getSpaceEventsFieldBuilder()
.addBuilder(com.google.chat.v1.SpaceEvent.getDefaultInstance());
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public com.google.chat.v1.SpaceEvent.Builder addSpaceEventsBuilder(int index) {
return getSpaceEventsFieldBuilder()
.addBuilder(index, com.google.chat.v1.SpaceEvent.getDefaultInstance());
}
/**
*
*
* <pre>
* Results are returned in chronological order (oldest event first).
* Note: The `permissionSettings` field is not returned in the Space
* object for list requests.
* </pre>
*
* <code>repeated .google.chat.v1.SpaceEvent space_events = 1;</code>
*/
public java.util.List<com.google.chat.v1.SpaceEvent.Builder> getSpaceEventsBuilderList() {
return getSpaceEventsFieldBuilder().getBuilderList();
}
    /** Lazily creates the repeated-field builder, transferring ownership of the current list. */
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.chat.v1.SpaceEvent,
            com.google.chat.v1.SpaceEvent.Builder,
            com.google.chat.v1.SpaceEventOrBuilder>
        getSpaceEventsFieldBuilder() {
      if (spaceEventsBuilder_ == null) {
        spaceEventsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.chat.v1.SpaceEvent,
                com.google.chat.v1.SpaceEvent.Builder,
                com.google.chat.v1.SpaceEventOrBuilder>(
                spaceEvents_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        // The field builder now owns the data; null the list reference to avoid dual ownership.
        spaceEvents_ = null;
      }
      return spaceEventsBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Continuation token used to fetch more events.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Continuation token used to fetch more events.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Continuation token used to fetch more events.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Continuation token used to fetch more events.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Continuation token used to fetch more events.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.chat.v1.ListSpaceEventsResponse)
}
// @@protoc_insertion_point(class_scope:google.chat.v1.ListSpaceEventsResponse)
  // Singleton default (all-fields-unset) instance, created eagerly at class load time.
  private static final com.google.chat.v1.ListSpaceEventsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.chat.v1.ListSpaceEventsResponse();
  }
  /** Returns the immutable default instance of this message type. */
  public static com.google.chat.v1.ListSpaceEventsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser shared by all the static parseFrom/parseDelimitedFrom entry points above.
  private static final com.google.protobuf.Parser<ListSpaceEventsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListSpaceEventsResponse>() {
        @java.lang.Override
        public ListSpaceEventsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect the partial message.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so this method's checked signature stays protobuf-only.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<ListSpaceEventsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListSpaceEventsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.chat.v1.ListSpaceEventsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== end of generated file. Next file: google/ExoPlayer —
// library/core/src/main/java/com/google/android/exoplayer2/analytics/MediaMetricsListener.java ====
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.analytics;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Util.castNonNull;
import android.annotation.SuppressLint;
import android.content.Context;
import android.media.DeniedByServerException;
import android.media.MediaCodec;
import android.media.MediaDrm;
import android.media.MediaDrmResetException;
import android.media.NotProvisionedException;
import android.media.metrics.LogSessionId;
import android.media.metrics.MediaMetricsManager;
import android.media.metrics.NetworkEvent;
import android.media.metrics.PlaybackErrorEvent;
import android.media.metrics.PlaybackMetrics;
import android.media.metrics.PlaybackSession;
import android.media.metrics.PlaybackStateEvent;
import android.media.metrics.TrackChangeEvent;
import android.os.SystemClock;
import android.system.ErrnoException;
import android.system.OsConstants;
import android.util.Pair;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.C.ContentType;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayerLibraryInfo;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.ParserException;
import com.google.android.exoplayer2.PlaybackException;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.Tracks;
import com.google.android.exoplayer2.audio.AudioSink;
import com.google.android.exoplayer2.decoder.DecoderCounters;
import com.google.android.exoplayer2.drm.DefaultDrmSessionManager;
import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.drm.DrmSession;
import com.google.android.exoplayer2.drm.UnsupportedDrmException;
import com.google.android.exoplayer2.mediacodec.MediaCodecDecoderException;
import com.google.android.exoplayer2.mediacodec.MediaCodecRenderer;
import com.google.android.exoplayer2.source.LoadEventInfo;
import com.google.android.exoplayer2.source.MediaLoadData;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.upstream.FileDataSource;
import com.google.android.exoplayer2.upstream.HttpDataSource;
import com.google.android.exoplayer2.upstream.UdpDataSource;
import com.google.android.exoplayer2.util.NetworkTypeObserver;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.VideoSize;
import com.google.common.collect.ImmutableList;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.UUID;
import org.checkerframework.checker.nullness.compatqual.NullableType;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/**
* An {@link AnalyticsListener} that interacts with the Android {@link MediaMetricsManager}.
*
* <p>It listens to playback events and forwards them to a {@link PlaybackSession}. The {@link
* LogSessionId} of the playback session can be obtained with {@link #getLogSessionId()}.
*
* @deprecated com.google.android.exoplayer2 is deprecated. Please migrate to androidx.media3 (which
* contains the same ExoPlayer code). See <a
* href="https://developer.android.com/guide/topics/media/media3/getting-started/migration-guide">the
* migration guide</a> for more details, including a script to help with the migration.
*/
@RequiresApi(31)
@Deprecated
public final class MediaMetricsListener
implements AnalyticsListener, PlaybackSessionManager.Listener {
/**
* Creates a media metrics listener.
*
* @param context A context.
* @return The {@link MediaMetricsListener}, or null if the {@link Context#MEDIA_METRICS_SERVICE
* media metrics service} isn't available.
*/
  @Nullable
  public static MediaMetricsListener create(Context context) {
    @Nullable
    MediaMetricsManager mediaMetricsManager =
        (MediaMetricsManager) context.getSystemService(Context.MEDIA_METRICS_SERVICE);
    // getSystemService returns null when the media metrics service is unavailable on this device.
    return mediaMetricsManager == null
        ? null
        : new MediaMetricsListener(context, mediaMetricsManager.createPlaybackSession());
  }
  private final Context context;
  private final PlaybackSessionManager sessionManager;
  private final PlaybackSession playbackSession;
  // elapsedRealtime() at creation; event timestamps are reported relative to this.
  private final long startTimeMs;
  // Reusable holders to avoid per-event allocations when resolving timeline metadata.
  private final Timeline.Window window;
  private final Timeline.Period period;
  // Per-session bandwidth accumulators, keyed by session id (see onBandwidthEstimate).
  private final HashMap<String, Long> bandwidthTimeMs;
  private final HashMap<String, Long> bandwidthBytes;
  @Nullable private String activeSessionId;
  // Metrics under construction for the active session; null while no session is active.
  @Nullable private PlaybackMetrics.Builder metricsBuilder;
  private @Player.DiscontinuityReason int discontinuityReason;
  private int currentPlaybackState;
  private int currentNetworkType;
  // State captured by individual callbacks and flushed to the platform session in onEvents().
  @Nullable private PlaybackException pendingPlayerError;
  @Nullable private PendingFormatUpdate pendingVideoFormat;
  @Nullable private PendingFormatUpdate pendingAudioFormat;
  @Nullable private PendingFormatUpdate pendingTextFormat;
  @Nullable private Format currentVideoFormat;
  @Nullable private Format currentAudioFormat;
  @Nullable private Format currentTextFormat;
  private boolean isSeeking;
  private int ioErrorType;
  private boolean hasFatalError;
  // Aggregated counters reported once per session.
  private int droppedFrames;
  private int playedFrames;
  private int audioUnderruns;
  private boolean reportedEventsForCurrentSession;
  /**
   * Creates the listener.
   *
   * @param context A {@link Context}.
   * @param playbackSession The platform {@link PlaybackSession} that all metrics are reported to.
   */
  private MediaMetricsListener(Context context, PlaybackSession playbackSession) {
    // Hold the application context so an Activity/Service context is not leaked.
    context = context.getApplicationContext();
    this.context = context;
    this.playbackSession = playbackSession;
    window = new Timeline.Window();
    period = new Timeline.Period();
    bandwidthBytes = new HashMap<>();
    bandwidthTimeMs = new HashMap<>();
    startTimeMs = SystemClock.elapsedRealtime();
    currentPlaybackState = PlaybackStateEvent.STATE_NOT_STARTED;
    currentNetworkType = NetworkEvent.NETWORK_TYPE_UNKNOWN;
    sessionManager = new DefaultPlaybackSessionManager();
    sessionManager.setListener(this);
  }
  /** Returns the {@link LogSessionId} used by this listener. */
  public LogSessionId getLogSessionId() {
    return playbackSession.getSessionId();
  }
  // PlaybackSessionManager.Listener implementation.
  @Override
  public void onSessionCreated(EventTime eventTime, String sessionId) {}
  @Override
  public void onSessionActive(EventTime eventTime, String sessionId) {
    if (eventTime.mediaPeriodId != null && eventTime.mediaPeriodId.isAd()) {
      // Ignore ad sessions.
      return;
    }
    // A new session became active: close out metrics for the previous session and start a fresh
    // PlaybackMetrics builder for this one.
    finishCurrentSession();
    activeSessionId = sessionId;
    metricsBuilder =
        new PlaybackMetrics.Builder()
            .setPlayerName(ExoPlayerLibraryInfo.TAG)
            .setPlayerVersion(ExoPlayerLibraryInfo.VERSION);
    maybeUpdateTimelineMetadata(eventTime.timeline, eventTime.mediaPeriodId);
  }
  @Override
  public void onAdPlaybackStarted(
      EventTime eventTime, String contentSessionId, String adSessionId) {}
  @Override
  public void onSessionFinished(
      EventTime eventTime, String sessionId, boolean automaticTransitionToNextPlayback) {
    if ((eventTime.mediaPeriodId != null && eventTime.mediaPeriodId.isAd())
        || !sessionId.equals(activeSessionId)) {
      // Ignore ad sessions and other sessions that are finished before becoming active.
    } else {
      finishCurrentSession();
    }
    // Drop per-session bandwidth accumulators in all cases; this session id will not recur.
    bandwidthTimeMs.remove(sessionId);
    bandwidthBytes.remove(sessionId);
  }
  // AnalyticsListener implementation.
  @Override
  public void onPositionDiscontinuity(
      EventTime eventTime,
      Player.PositionInfo oldPosition,
      Player.PositionInfo newPosition,
      @Player.DiscontinuityReason int reason) {
    if (reason == Player.DISCONTINUITY_REASON_SEEK) {
      // Remembered so later state reporting can reflect that a seek is in progress.
      isSeeking = true;
    }
    // Kept so maybeAddSessions() can forward the reason together with the discontinuity event.
    discontinuityReason = reason;
  }
  @Override
  public void onVideoDisabled(EventTime eventTime, DecoderCounters decoderCounters) {
    // TODO(b/181122234): DecoderCounters are not re-reported at period boundaries.
    droppedFrames += decoderCounters.droppedBufferCount;
    playedFrames += decoderCounters.renderedOutputBufferCount;
  }
@Override
public void onBandwidthEstimate(
EventTime eventTime, int totalLoadTimeMs, long totalBytesLoaded, long bitrateEstimate) {
if (eventTime.mediaPeriodId != null) {
String sessionId =
sessionManager.getSessionForMediaPeriodId(
eventTime.timeline, checkNotNull(eventTime.mediaPeriodId));
@Nullable Long prevBandwidthBytes = bandwidthBytes.get(sessionId);
@Nullable Long prevBandwidthTimeMs = bandwidthTimeMs.get(sessionId);
bandwidthBytes.put(
sessionId, (prevBandwidthBytes == null ? 0 : prevBandwidthBytes) + totalBytesLoaded);
bandwidthTimeMs.put(
sessionId, (prevBandwidthTimeMs == null ? 0 : prevBandwidthTimeMs) + totalLoadTimeMs);
}
}
  @Override
  public void onDownstreamFormatChanged(EventTime eventTime, MediaLoadData mediaLoadData) {
    if (eventTime.mediaPeriodId == null) {
      // This event arrived after the media has been removed from the playlist or a custom
      // MediaSource forgot to set the right id. Ignore the track change in these cases.
      return;
    }
    // Record the change as pending; it is reported later from onEvents(), batched per update.
    PendingFormatUpdate update =
        new PendingFormatUpdate(
            checkNotNull(mediaLoadData.trackFormat),
            mediaLoadData.trackSelectionReason,
            sessionManager.getSessionForMediaPeriodId(
                eventTime.timeline, checkNotNull(eventTime.mediaPeriodId)));
    switch (mediaLoadData.trackType) {
      case C.TRACK_TYPE_VIDEO:
      case C.TRACK_TYPE_DEFAULT:
        pendingVideoFormat = update;
        break;
      case C.TRACK_TYPE_AUDIO:
        pendingAudioFormat = update;
        break;
      case C.TRACK_TYPE_TEXT:
        pendingTextFormat = update;
        break;
      default:
        // Other track type. Ignore.
    }
  }
  @Override
  public void onVideoSizeChanged(EventTime eventTime, VideoSize videoSize) {
    @Nullable PendingFormatUpdate pendingVideoFormat = this.pendingVideoFormat;
    if (pendingVideoFormat != null && pendingVideoFormat.format.height == Format.NO_VALUE) {
      // The pending video format lacked dimensions; fill them in from the reported video size
      // before the track change is eventually reported.
      Format formatWithHeightAndWidth =
          pendingVideoFormat
              .format
              .buildUpon()
              .setWidth(videoSize.width)
              .setHeight(videoSize.height)
              .build();
      this.pendingVideoFormat =
          new PendingFormatUpdate(
              formatWithHeightAndWidth,
              pendingVideoFormat.selectionReason,
              pendingVideoFormat.sessionId);
    }
  }
  @Override
  public void onLoadError(
      EventTime eventTime,
      LoadEventInfo loadEventInfo,
      MediaLoadData mediaLoadData,
      IOException error,
      boolean wasCanceled) {
    // Only the data type is retained; it later helps classify a fatal error (e.g. manifest vs.
    // media failure) when building the PlaybackErrorEvent.
    ioErrorType = mediaLoadData.dataType;
  }
  @Override
  public void onPlayerError(EventTime eventTime, PlaybackException error) {
    // Deferred: the error is reported from onEvents() so it is batched with session updates.
    pendingPlayerError = error;
  }
  /**
   * Batch entry point: state captured by the individual callbacks above is folded into metrics
   * here, in a fixed order (sessions first, then error, track, network and state reports).
   */
  @Override
  public void onEvents(Player player, Events events) {
    if (events.size() == 0) {
      return;
    }
    maybeAddSessions(events);
    long realtimeMs = SystemClock.elapsedRealtime();
    maybeUpdateMetricsBuilderValues(player, events);
    maybeReportPlaybackError(realtimeMs);
    maybeReportTrackChanges(player, events, realtimeMs);
    maybeReportNetworkChange(realtimeMs);
    maybeReportPlaybackStateChange(player, events, realtimeMs);
    if (events.contains(AnalyticsListener.EVENT_PLAYER_RELEASED)) {
      // Releasing the player ends every session so final metrics are flushed.
      sessionManager.finishAllSessions(events.getEventTime(EVENT_PLAYER_RELEASED));
    }
  }
private void maybeAddSessions(Events events) {
for (int i = 0; i < events.size(); i++) {
@EventFlags int event = events.get(i);
EventTime eventTime = events.getEventTime(event);
if (event == EVENT_TIMELINE_CHANGED) {
sessionManager.updateSessionsWithTimelineChange(eventTime);
} else if (event == EVENT_POSITION_DISCONTINUITY) {
sessionManager.updateSessionsWithDiscontinuity(eventTime, discontinuityReason);
} else {
sessionManager.updateSessions(eventTime);
}
}
}
  // Updates the pending PlaybackMetrics.Builder with values derived from this batch of events.
  private void maybeUpdateMetricsBuilderValues(Player player, Events events) {
    // Timeline changes can update the stream type, media duration and playback type.
    if (events.contains(EVENT_TIMELINE_CHANGED)) {
      EventTime eventTime = events.getEventTime(EVENT_TIMELINE_CHANGED);
      if (metricsBuilder != null) {
        maybeUpdateTimelineMetadata(eventTime.timeline, eventTime.mediaPeriodId);
      }
    }
    // Track changes can reveal DrmInitData from which the DRM type is derived.
    if (events.contains(EVENT_TRACKS_CHANGED) && metricsBuilder != null) {
      @Nullable DrmInitData drmInitData = getDrmInitData(player.getCurrentTracks().getGroups());
      if (drmInitData != null) {
        castNonNull(metricsBuilder).setDrmType(getDrmType(drmInitData));
      }
    }
    // Underruns are counted here and only written to the builder when the session finishes.
    if (events.contains(EVENT_AUDIO_UNDERRUN)) {
      audioUnderruns++;
    }
  }
private void maybeReportPlaybackError(long realtimeMs) {
@Nullable PlaybackException error = pendingPlayerError;
if (error == null) {
return;
}
ErrorInfo errorInfo =
getErrorInfo(
error, context, /* lastIoErrorForManifest= */ ioErrorType == C.DATA_TYPE_MANIFEST);
playbackSession.reportPlaybackErrorEvent(
new PlaybackErrorEvent.Builder()
.setTimeSinceCreatedMillis(realtimeMs - startTimeMs)
.setErrorCode(errorInfo.errorCode)
.setSubErrorCode(errorInfo.subErrorCode)
.setException(error)
.build());
reportedEventsForCurrentSession = true;
pendingPlayerError = null;
}
  /**
   * Reports track changes for the current session.
   *
   * <p>Deselected track types are reported as an explicit "track off" update, and pending format
   * updates captured in {@code onDownstreamFormatChanged} are flushed once they belong to the
   * active session. Video additionally waits until its height is known (filled in by {@code
   * onVideoSizeChanged}) before being reported.
   */
  private void maybeReportTrackChanges(Player player, Events events, long realtimeMs) {
    if (events.contains(EVENT_TRACKS_CHANGED)) {
      Tracks tracks = player.getCurrentTracks();
      boolean isVideoSelected = tracks.isTypeSelected(C.TRACK_TYPE_VIDEO);
      boolean isAudioSelected = tracks.isTypeSelected(C.TRACK_TYPE_AUDIO);
      boolean isTextSelected = tracks.isTypeSelected(C.TRACK_TYPE_TEXT);
      if (isVideoSelected || isAudioSelected || isTextSelected) {
        // Ignore updates with insufficient information where no tracks are selected.
        if (!isVideoSelected) {
          maybeUpdateVideoFormat(realtimeMs, /* videoFormat= */ null, C.SELECTION_REASON_UNKNOWN);
        }
        if (!isAudioSelected) {
          maybeUpdateAudioFormat(realtimeMs, /* audioFormat= */ null, C.SELECTION_REASON_UNKNOWN);
        }
        if (!isTextSelected) {
          maybeUpdateTextFormat(realtimeMs, /* textFormat= */ null, C.SELECTION_REASON_UNKNOWN);
        }
      }
    }
    // Flush pending format updates that belong to the active session; each is cleared once used.
    if (canReportPendingFormatUpdate(pendingVideoFormat)
        && pendingVideoFormat.format.height != Format.NO_VALUE) {
      maybeUpdateVideoFormat(
          realtimeMs, pendingVideoFormat.format, pendingVideoFormat.selectionReason);
      pendingVideoFormat = null;
    }
    if (canReportPendingFormatUpdate(pendingAudioFormat)) {
      maybeUpdateAudioFormat(
          realtimeMs, pendingAudioFormat.format, pendingAudioFormat.selectionReason);
      pendingAudioFormat = null;
    }
    if (canReportPendingFormatUpdate(pendingTextFormat)) {
      maybeUpdateTextFormat(
          realtimeMs, pendingTextFormat.format, pendingTextFormat.selectionReason);
      pendingTextFormat = null;
    }
  }
  /**
   * Returns whether the given pending update can be reported now, i.e. it exists and belongs to
   * the currently active playback session.
   *
   * <p>The {@code @EnsuresNonNullIf} contract lets callers dereference the argument without a
   * further null check when this method returns {@code true}.
   */
  @EnsuresNonNullIf(result = true, expression = "#1")
  private boolean canReportPendingFormatUpdate(@Nullable PendingFormatUpdate pendingFormatUpdate) {
    return pendingFormatUpdate != null
        && pendingFormatUpdate.sessionId.equals(sessionManager.getActiveSessionId());
  }
private void maybeReportNetworkChange(long realtimeMs) {
int networkType = getNetworkType(context);
if (networkType != currentNetworkType) {
currentNetworkType = networkType;
playbackSession.reportNetworkEvent(
new NetworkEvent.Builder()
.setNetworkType(networkType)
.setTimeSinceCreatedMillis(realtimeMs - startTimeMs)
.build());
}
}
private void maybeReportPlaybackStateChange(Player player, Events events, long realtimeMs) {
if (player.getPlaybackState() != Player.STATE_BUFFERING) {
isSeeking = false;
}
if (player.getPlayerError() == null) {
hasFatalError = false;
} else if (events.contains(EVENT_PLAYER_ERROR)) {
hasFatalError = true;
}
int newPlaybackState = resolveNewPlaybackState(player);
if (currentPlaybackState != newPlaybackState) {
currentPlaybackState = newPlaybackState;
reportedEventsForCurrentSession = true;
playbackSession.reportPlaybackStateEvent(
new PlaybackStateEvent.Builder()
.setState(currentPlaybackState)
.setTimeSinceCreatedMillis(realtimeMs - startTimeMs)
.build());
}
}
  /**
   * Derives the {@code PlaybackStateEvent} state to report from the player state and this
   * listener's seeking/error bookkeeping.
   *
   * <p>Precedence: seeking trumps a fatal error, which trumps the player's own state. If no rule
   * matches, the current (unchanged) state is returned.
   */
  private int resolveNewPlaybackState(Player player) {
    @Player.State int playerPlaybackState = player.getPlaybackState();
    if (isSeeking) {
      // Seeking takes precedence over errors such that we report a seek while in error state.
      return PlaybackStateEvent.STATE_SEEKING;
    } else if (hasFatalError) {
      return PlaybackStateEvent.STATE_FAILED;
    } else if (playerPlaybackState == Player.STATE_ENDED) {
      return PlaybackStateEvent.STATE_ENDED;
    } else if (playerPlaybackState == Player.STATE_BUFFERING) {
      // Buffering before the first READY state counts as joining, not buffering.
      if (currentPlaybackState == PlaybackStateEvent.STATE_NOT_STARTED
          || currentPlaybackState == PlaybackStateEvent.STATE_JOINING_FOREGROUND) {
        return PlaybackStateEvent.STATE_JOINING_FOREGROUND;
      }
      if (!player.getPlayWhenReady()) {
        return PlaybackStateEvent.STATE_PAUSED_BUFFERING;
      }
      return player.getPlaybackSuppressionReason() != Player.PLAYBACK_SUPPRESSION_REASON_NONE
          ? PlaybackStateEvent.STATE_SUPPRESSED_BUFFERING
          : PlaybackStateEvent.STATE_BUFFERING;
    } else if (playerPlaybackState == Player.STATE_READY) {
      if (!player.getPlayWhenReady()) {
        return PlaybackStateEvent.STATE_PAUSED;
      }
      return player.getPlaybackSuppressionReason() != Player.PLAYBACK_SUPPRESSION_REASON_NONE
          ? PlaybackStateEvent.STATE_SUPPRESSED
          : PlaybackStateEvent.STATE_PLAYING;
    } else if (playerPlaybackState == Player.STATE_IDLE
        && currentPlaybackState != PlaybackStateEvent.STATE_NOT_STARTED) {
      // This case only applies for calls to player.stop(). All other IDLE cases are handled by
      // !isForeground, hasFatalError or isSuspended. NOT_STARTED is deliberately ignored.
      return PlaybackStateEvent.STATE_STOPPED;
    }
    return currentPlaybackState;
  }
private void maybeUpdateVideoFormat(
long realtimeMs, @Nullable Format videoFormat, @C.SelectionReason int trackSelectionReason) {
if (Util.areEqual(currentVideoFormat, videoFormat)) {
return;
}
if (currentVideoFormat == null && trackSelectionReason == C.SELECTION_REASON_UNKNOWN) {
trackSelectionReason = C.SELECTION_REASON_INITIAL;
}
currentVideoFormat = videoFormat;
reportTrackChangeEvent(
TrackChangeEvent.TRACK_TYPE_VIDEO, realtimeMs, videoFormat, trackSelectionReason);
}
private void maybeUpdateAudioFormat(
long realtimeMs, @Nullable Format audioFormat, @C.SelectionReason int trackSelectionReason) {
if (Util.areEqual(currentAudioFormat, audioFormat)) {
return;
}
if (currentAudioFormat == null && trackSelectionReason == C.SELECTION_REASON_UNKNOWN) {
trackSelectionReason = C.SELECTION_REASON_INITIAL;
}
currentAudioFormat = audioFormat;
reportTrackChangeEvent(
TrackChangeEvent.TRACK_TYPE_AUDIO, realtimeMs, audioFormat, trackSelectionReason);
}
private void maybeUpdateTextFormat(
long realtimeMs, @Nullable Format textFormat, @C.SelectionReason int trackSelectionReason) {
if (Util.areEqual(currentTextFormat, textFormat)) {
return;
}
if (currentTextFormat == null && trackSelectionReason == C.SELECTION_REASON_UNKNOWN) {
trackSelectionReason = C.SELECTION_REASON_INITIAL;
}
currentTextFormat = textFormat;
reportTrackChangeEvent(
TrackChangeEvent.TRACK_TYPE_TEXT, realtimeMs, textFormat, trackSelectionReason);
}
  /**
   * Builds and reports a {@code TrackChangeEvent} of the given type.
   *
   * <p>A non-null {@code format} is reported as "track on" with every populated field copied over;
   * a null {@code format} is reported as "track off".
   */
  private void reportTrackChangeEvent(
      int type,
      long realtimeMs,
      @Nullable Format format,
      @C.SelectionReason int trackSelectionReason) {
    TrackChangeEvent.Builder builder =
        new TrackChangeEvent.Builder(type).setTimeSinceCreatedMillis(realtimeMs - startTimeMs);
    if (format != null) {
      builder.setTrackState(TrackChangeEvent.TRACK_STATE_ON);
      builder.setTrackChangeReason(getTrackChangeReason(trackSelectionReason));
      // Only fields the format actually carries are set; the rest keep builder defaults.
      if (format.containerMimeType != null) {
        // TODO(b/181121074): Progressive container MIME type is not filled in by MediaSource.
        builder.setContainerMimeType(format.containerMimeType);
      }
      if (format.sampleMimeType != null) {
        builder.setSampleMimeType(format.sampleMimeType);
      }
      if (format.codecs != null) {
        builder.setCodecName(format.codecs);
      }
      if (format.bitrate != Format.NO_VALUE) {
        builder.setBitrate(format.bitrate);
      }
      if (format.width != Format.NO_VALUE) {
        builder.setWidth(format.width);
      }
      if (format.height != Format.NO_VALUE) {
        builder.setHeight(format.height);
      }
      if (format.channelCount != Format.NO_VALUE) {
        builder.setChannelCount(format.channelCount);
      }
      if (format.sampleRate != Format.NO_VALUE) {
        builder.setAudioSampleRate(format.sampleRate);
      }
      if (format.language != null) {
        Pair<String, @NullableType String> languageAndRegion =
            getLanguageAndRegion(format.language);
        builder.setLanguage(languageAndRegion.first);
        if (languageAndRegion.second != null) {
          builder.setLanguageRegion(languageAndRegion.second);
        }
      }
      if (format.frameRate != Format.NO_VALUE) {
        builder.setVideoFrameRate(format.frameRate);
      }
    } else {
      builder.setTrackState(TrackChangeEvent.TRACK_STATE_OFF);
    }
    reportedEventsForCurrentSession = true;
    playbackSession.reportTrackChangeEvent(builder.build());
  }
  /**
   * Updates stream type, media duration and playback type on {@code metricsBuilder} from the
   * timeline window containing the given media period.
   *
   * <p>No-op if {@code mediaPeriodId} is null or no longer present in {@code timeline}.
   */
  @RequiresNonNull("metricsBuilder")
  private void maybeUpdateTimelineMetadata(
      Timeline timeline, @Nullable MediaSource.MediaPeriodId mediaPeriodId) {
    // Local copy of the non-null field (guaranteed by @RequiresNonNull).
    PlaybackMetrics.Builder metricsBuilder = this.metricsBuilder;
    if (mediaPeriodId == null) {
      return;
    }
    int periodIndex = timeline.getIndexOfPeriod(mediaPeriodId.periodUid);
    if (periodIndex == C.INDEX_UNSET) {
      return;
    }
    timeline.getPeriod(periodIndex, period);
    timeline.getWindow(period.windowIndex, window);
    metricsBuilder.setStreamType(getStreamType(window.mediaItem));
    // Only report a fixed media duration for finalized, non-live, non-placeholder windows.
    if (window.durationUs != C.TIME_UNSET
        && !window.isPlaceholder
        && !window.isDynamic
        && !window.isLive()) {
      metricsBuilder.setMediaDurationMillis(window.getDurationMs());
    }
    metricsBuilder.setPlaybackType(
        window.isLive() ? PlaybackMetrics.PLAYBACK_TYPE_LIVE : PlaybackMetrics.PLAYBACK_TYPE_VOD);
    reportedEventsForCurrentSession = true;
  }
private void finishCurrentSession() {
if (metricsBuilder != null && reportedEventsForCurrentSession) {
metricsBuilder.setAudioUnderrunCount(audioUnderruns);
metricsBuilder.setVideoFramesDropped(droppedFrames);
metricsBuilder.setVideoFramesPlayed(playedFrames);
@Nullable Long networkTimeMs = bandwidthTimeMs.get(activeSessionId);
metricsBuilder.setNetworkTransferDurationMillis(networkTimeMs == null ? 0 : networkTimeMs);
// TODO(b/181121847): Report localBytesRead. This requires additional callbacks or plumbing.
@Nullable Long networkBytes = bandwidthBytes.get(activeSessionId);
metricsBuilder.setNetworkBytesRead(networkBytes == null ? 0 : networkBytes);
// TODO(b/181121847): Detect stream sources mixed and local depending on localBytesRead.
metricsBuilder.setStreamSource(
networkBytes != null && networkBytes > 0
? PlaybackMetrics.STREAM_SOURCE_NETWORK
: PlaybackMetrics.STREAM_SOURCE_UNKNOWN);
playbackSession.reportPlaybackMetrics(metricsBuilder.build());
}
metricsBuilder = null;
activeSessionId = null;
audioUnderruns = 0;
droppedFrames = 0;
playedFrames = 0;
currentVideoFormat = null;
currentAudioFormat = null;
currentTextFormat = null;
reportedEventsForCurrentSession = false;
}
private static int getTrackChangeReason(@C.SelectionReason int trackSelectionReason) {
switch (trackSelectionReason) {
case C.SELECTION_REASON_INITIAL:
return TrackChangeEvent.TRACK_CHANGE_REASON_INITIAL;
case C.SELECTION_REASON_ADAPTIVE:
return TrackChangeEvent.TRACK_CHANGE_REASON_ADAPTIVE;
case C.SELECTION_REASON_MANUAL:
return TrackChangeEvent.TRACK_CHANGE_REASON_MANUAL;
case C.SELECTION_REASON_TRICK_PLAY:
case C.SELECTION_REASON_UNKNOWN:
default:
return TrackChangeEvent.TRACK_CHANGE_REASON_OTHER;
}
}
private static Pair<String, @NullableType String> getLanguageAndRegion(String languageCode) {
String[] parts = Util.split(languageCode, "-");
return Pair.create(parts[0], parts.length >= 2 ? parts[1] : null);
}
  /** Maps the device's current {@code C.NetworkType} to a {@code NetworkEvent} constant. */
  private static int getNetworkType(Context context) {
    switch (NetworkTypeObserver.getInstance(context).getNetworkType()) {
      case C.NETWORK_TYPE_WIFI:
        return NetworkEvent.NETWORK_TYPE_WIFI;
      case C.NETWORK_TYPE_2G:
        return NetworkEvent.NETWORK_TYPE_2G;
      case C.NETWORK_TYPE_3G:
        return NetworkEvent.NETWORK_TYPE_3G;
      case C.NETWORK_TYPE_4G:
        return NetworkEvent.NETWORK_TYPE_4G;
      case C.NETWORK_TYPE_5G_SA:
        return NetworkEvent.NETWORK_TYPE_5G_SA;
      case C.NETWORK_TYPE_5G_NSA:
        return NetworkEvent.NETWORK_TYPE_5G_NSA;
      case C.NETWORK_TYPE_ETHERNET:
        return NetworkEvent.NETWORK_TYPE_ETHERNET;
      case C.NETWORK_TYPE_OFFLINE:
        return NetworkEvent.NETWORK_TYPE_OFFLINE;
      case C.NETWORK_TYPE_UNKNOWN:
        return NetworkEvent.NETWORK_TYPE_UNKNOWN;
      default:
        // Any type not covered above is reported as "other".
        return NetworkEvent.NETWORK_TYPE_OTHER;
    }
  }
  /**
   * Infers the {@code PlaybackMetrics} stream type from the media item's URI and MIME type.
   * Returns {@code STREAM_TYPE_UNKNOWN} when there is no local configuration to inspect.
   */
  private static int getStreamType(MediaItem mediaItem) {
    if (mediaItem.localConfiguration == null) {
      return PlaybackMetrics.STREAM_TYPE_UNKNOWN;
    }
    @ContentType
    int contentType =
        Util.inferContentTypeForUriAndMimeType(
            mediaItem.localConfiguration.uri, mediaItem.localConfiguration.mimeType);
    switch (contentType) {
      case C.CONTENT_TYPE_HLS:
        return PlaybackMetrics.STREAM_TYPE_HLS;
      case C.CONTENT_TYPE_DASH:
        return PlaybackMetrics.STREAM_TYPE_DASH;
      case C.CONTENT_TYPE_SS:
        return PlaybackMetrics.STREAM_TYPE_SS;
      case C.CONTENT_TYPE_RTSP:
      default:
        // RTSP and any unrecognized content type map to "other".
        return PlaybackMetrics.STREAM_TYPE_OTHER;
    }
  }
  /**
   * Derives a {@code PlaybackErrorEvent} error code and sub-error code from a player error.
   *
   * @param error the error to classify
   * @param context used to query the current network type when classifying I/O errors
   * @param lastIoErrorForManifest whether the most recent failed load was a manifest load, used to
   *     distinguish manifest parsing errors from container parsing errors
   */
  private static ErrorInfo getErrorInfo(
      PlaybackException error, Context context, boolean lastIoErrorForManifest) {
    if (error.errorCode == PlaybackException.ERROR_CODE_REMOTE_ERROR) {
      return new ErrorInfo(PlaybackErrorEvent.ERROR_PLAYER_REMOTE, /* subErrorCode= */ 0);
    }
    // Unpack the PlaybackException.
    // TODO(b/190203080): Use error codes instead of the Exception's cause where possible.
    boolean isRendererExoPlaybackException = false;
    int rendererFormatSupport = C.FORMAT_UNSUPPORTED_TYPE;
    if (error instanceof ExoPlaybackException) {
      ExoPlaybackException exoPlaybackException = (ExoPlaybackException) error;
      isRendererExoPlaybackException =
          exoPlaybackException.type == ExoPlaybackException.TYPE_RENDERER;
      rendererFormatSupport = exoPlaybackException.rendererFormatSupport;
    }
    // Classification below is driven by the exception cause chain.
    Throwable cause = checkNotNull(error.getCause());
    if (cause instanceof IOException) {
      if (cause instanceof HttpDataSource.InvalidResponseCodeException) {
        int responseCode = ((HttpDataSource.InvalidResponseCodeException) cause).responseCode;
        return new ErrorInfo(
            PlaybackErrorEvent.ERROR_IO_BAD_HTTP_STATUS, /* subErrorCode= */ responseCode);
      } else if (cause instanceof HttpDataSource.InvalidContentTypeException
          || cause instanceof ParserException) {
        return new ErrorInfo(
            lastIoErrorForManifest
                ? PlaybackErrorEvent.ERROR_PARSING_MANIFEST_MALFORMED
                : PlaybackErrorEvent.ERROR_PARSING_CONTAINER_MALFORMED,
            /* subErrorCode= */ 0);
      } else if (cause instanceof HttpDataSource.HttpDataSourceException
          || cause instanceof UdpDataSource.UdpDataSourceException) {
        // Network errors: distinguish "device is offline" from specific connection failures.
        if (NetworkTypeObserver.getInstance(context).getNetworkType() == C.NETWORK_TYPE_OFFLINE) {
          return new ErrorInfo(
              PlaybackErrorEvent.ERROR_IO_NETWORK_UNAVAILABLE, /* subErrorCode= */ 0);
        } else {
          @Nullable Throwable detailedCause = cause.getCause();
          if (detailedCause instanceof UnknownHostException) {
            return new ErrorInfo(PlaybackErrorEvent.ERROR_IO_DNS_FAILED, /* subErrorCode= */ 0);
          } else if (detailedCause instanceof SocketTimeoutException) {
            return new ErrorInfo(
                PlaybackErrorEvent.ERROR_IO_CONNECTION_TIMEOUT, /* subErrorCode= */ 0);
          } else if (cause instanceof HttpDataSource.HttpDataSourceException
              && ((HttpDataSource.HttpDataSourceException) cause).type
                  == HttpDataSource.HttpDataSourceException.TYPE_OPEN) {
            return new ErrorInfo(
                PlaybackErrorEvent.ERROR_IO_NETWORK_CONNECTION_FAILED, /* subErrorCode= */ 0);
          } else {
            return new ErrorInfo(
                PlaybackErrorEvent.ERROR_IO_CONNECTION_CLOSED, /* subErrorCode= */ 0);
          }
        }
      } else if (error.errorCode == PlaybackException.ERROR_CODE_BEHIND_LIVE_WINDOW) {
        return new ErrorInfo(
            PlaybackErrorEvent.ERROR_PLAYER_BEHIND_LIVE_WINDOW, /* subErrorCode= */ 0);
      } else if (cause instanceof DrmSession.DrmSessionException) {
        // Unpack DrmSessionException.
        cause = checkNotNull(cause.getCause());
        if (Util.SDK_INT >= 21 && cause instanceof MediaDrm.MediaDrmStateException) {
          String diagnosticsInfo = ((MediaDrm.MediaDrmStateException) cause).getDiagnosticInfo();
          int subErrorCode = Util.getErrorCodeFromPlatformDiagnosticsInfo(diagnosticsInfo);
          int errorCode = getDrmErrorCode(subErrorCode);
          return new ErrorInfo(errorCode, subErrorCode);
        } else if (Util.SDK_INT >= 23 && cause instanceof MediaDrmResetException) {
          return new ErrorInfo(PlaybackErrorEvent.ERROR_DRM_SYSTEM_ERROR, /* subErrorCode= */ 0);
        } else if (Util.SDK_INT >= 18 && cause instanceof NotProvisionedException) {
          return new ErrorInfo(
              PlaybackErrorEvent.ERROR_DRM_PROVISIONING_FAILED, /* subErrorCode= */ 0);
        } else if (Util.SDK_INT >= 18 && cause instanceof DeniedByServerException) {
          return new ErrorInfo(PlaybackErrorEvent.ERROR_DRM_DEVICE_REVOKED, /* subErrorCode= */ 0);
        } else if (cause instanceof UnsupportedDrmException) {
          return new ErrorInfo(
              PlaybackErrorEvent.ERROR_DRM_SCHEME_UNSUPPORTED, /* subErrorCode= */ 0);
        } else if (cause instanceof DefaultDrmSessionManager.MissingSchemeDataException) {
          return new ErrorInfo(PlaybackErrorEvent.ERROR_DRM_CONTENT_ERROR, /* subErrorCode= */ 0);
        } else {
          return new ErrorInfo(PlaybackErrorEvent.ERROR_DRM_OTHER, /* subErrorCode= */ 0);
        }
      } else if (cause instanceof FileDataSource.FileDataSourceException
          && cause.getCause() instanceof FileNotFoundException) {
        @Nullable Throwable notFoundCause = checkNotNull(cause.getCause()).getCause();
        if (Util.SDK_INT >= 21
            && notFoundCause instanceof ErrnoException
            && ((ErrnoException) notFoundCause).errno == OsConstants.EACCES) {
          return new ErrorInfo(PlaybackErrorEvent.ERROR_IO_NO_PERMISSION, /* subErrorCode= */ 0);
        } else {
          return new ErrorInfo(PlaybackErrorEvent.ERROR_IO_FILE_NOT_FOUND, /* subErrorCode= */ 0);
        }
      } else {
        return new ErrorInfo(PlaybackErrorEvent.ERROR_IO_OTHER, /* subErrorCode= */ 0);
      }
    } else if (isRendererExoPlaybackException
        && (rendererFormatSupport == C.FORMAT_UNSUPPORTED_TYPE
            || rendererFormatSupport == C.FORMAT_UNSUPPORTED_SUBTYPE)) {
      return new ErrorInfo(
          PlaybackErrorEvent.ERROR_DECODING_FORMAT_UNSUPPORTED, /* subErrorCode= */ 0);
    } else if (isRendererExoPlaybackException
        && rendererFormatSupport == C.FORMAT_EXCEEDS_CAPABILITIES) {
      return new ErrorInfo(
          PlaybackErrorEvent.ERROR_DECODING_FORMAT_EXCEEDS_CAPABILITIES, /* subErrorCode= */ 0);
    } else if (isRendererExoPlaybackException
        && rendererFormatSupport == C.FORMAT_UNSUPPORTED_DRM) {
      return new ErrorInfo(PlaybackErrorEvent.ERROR_DRM_SCHEME_UNSUPPORTED, /* subErrorCode= */ 0);
    } else if (cause instanceof MediaCodecRenderer.DecoderInitializationException) {
      @Nullable
      String diagnosticsInfo =
          ((MediaCodecRenderer.DecoderInitializationException) cause).diagnosticInfo;
      int subErrorCode = Util.getErrorCodeFromPlatformDiagnosticsInfo(diagnosticsInfo);
      return new ErrorInfo(PlaybackErrorEvent.ERROR_DECODER_INIT_FAILED, subErrorCode);
    } else if (cause instanceof MediaCodecDecoderException) {
      @Nullable String diagnosticsInfo = ((MediaCodecDecoderException) cause).diagnosticInfo;
      int subErrorCode = Util.getErrorCodeFromPlatformDiagnosticsInfo(diagnosticsInfo);
      return new ErrorInfo(PlaybackErrorEvent.ERROR_DECODING_FAILED, subErrorCode);
    } else if (cause instanceof OutOfMemoryError) {
      return new ErrorInfo(PlaybackErrorEvent.ERROR_DECODING_FAILED, /* subErrorCode= */ 0);
    } else if (cause instanceof AudioSink.InitializationException) {
      int subErrorCode = ((AudioSink.InitializationException) cause).audioTrackState;
      return new ErrorInfo(PlaybackErrorEvent.ERROR_AUDIO_TRACK_INIT_FAILED, subErrorCode);
    } else if (cause instanceof AudioSink.WriteException) {
      int subErrorCode = ((AudioSink.WriteException) cause).errorCode;
      return new ErrorInfo(PlaybackErrorEvent.ERROR_AUDIO_TRACK_WRITE_FAILED, subErrorCode);
    } else if (Util.SDK_INT >= 16 && cause instanceof MediaCodec.CryptoException) {
      int subErrorCode = ((MediaCodec.CryptoException) cause).getErrorCode();
      int errorCode = getDrmErrorCode(subErrorCode);
      return new ErrorInfo(errorCode, subErrorCode);
    } else {
      return new ErrorInfo(PlaybackErrorEvent.ERROR_PLAYER_OTHER, /* subErrorCode= */ 0);
    }
  }
@Nullable
private static DrmInitData getDrmInitData(ImmutableList<Tracks.Group> trackGroups) {
for (Tracks.Group trackGroup : trackGroups) {
for (int trackIndex = 0; trackIndex < trackGroup.length; trackIndex++) {
if (trackGroup.isTrackSelected(trackIndex)) {
@Nullable DrmInitData drmInitData = trackGroup.getTrackFormat(trackIndex).drmInitData;
if (drmInitData != null) {
return drmInitData;
}
}
}
}
return null;
}
private static int getDrmType(DrmInitData drmInitData) {
for (int i = 0; i < drmInitData.schemeDataCount; i++) {
UUID uuid = drmInitData.get(i).uuid;
if (uuid.equals(C.WIDEVINE_UUID)) {
// TODO(b/77625596): Forward MediaDrm metrics to distinguish between L1 and L3 and to set
// the drm session id.
return PlaybackMetrics.DRM_TYPE_WIDEVINE_L1;
}
if (uuid.equals(C.PLAYREADY_UUID)) {
return PlaybackMetrics.DRM_TYPE_PLAY_READY;
}
if (uuid.equals(C.CLEARKEY_UUID)) {
return PlaybackMetrics.DRM_TYPE_CLEARKEY;
}
}
return PlaybackMetrics.DRM_TYPE_OTHER;
}
  /**
   * Maps a platform MediaDrm error code to the corresponding {@code PlaybackErrorEvent} DRM error
   * code, defaulting to {@code ERROR_DRM_SYSTEM_ERROR}.
   */
  @SuppressLint("SwitchIntDef") // Only DRM error codes are relevant here.
  private static int getDrmErrorCode(int mediaDrmErrorCode) {
    switch (Util.getErrorCodeForMediaDrmErrorCode(mediaDrmErrorCode)) {
      case PlaybackException.ERROR_CODE_DRM_PROVISIONING_FAILED:
        return PlaybackErrorEvent.ERROR_DRM_PROVISIONING_FAILED;
      case PlaybackException.ERROR_CODE_DRM_LICENSE_ACQUISITION_FAILED:
        return PlaybackErrorEvent.ERROR_DRM_LICENSE_ACQUISITION_FAILED;
      case PlaybackException.ERROR_CODE_DRM_DISALLOWED_OPERATION:
        return PlaybackErrorEvent.ERROR_DRM_DISALLOWED_OPERATION;
      case PlaybackException.ERROR_CODE_DRM_CONTENT_ERROR:
        return PlaybackErrorEvent.ERROR_DRM_CONTENT_ERROR;
      case PlaybackException.ERROR_CODE_DRM_SYSTEM_ERROR:
      default:
        return PlaybackErrorEvent.ERROR_DRM_SYSTEM_ERROR;
    }
  }
  /** Immutable value holder pairing a {@code PlaybackErrorEvent} error code with its sub-code. */
  private static final class ErrorInfo {
    // Main error code, one of the PlaybackErrorEvent.ERROR_* constants.
    public final int errorCode;
    // Detail code, e.g. an HTTP status or platform diagnostic code; 0 when unused.
    public final int subErrorCode;
    public ErrorInfo(int errorCode, int subErrorCode) {
      this.errorCode = errorCode;
      this.subErrorCode = subErrorCode;
    }
  }
  /**
   * Immutable holder for a not-yet-reported track format change, tagged with the session it
   * belongs to so it is only reported once that session is active.
   */
  private static final class PendingFormatUpdate {
    // The new track format.
    public final Format format;
    // Why this format was selected (C.SELECTION_REASON_* constant).
    public final @C.SelectionReason int selectionReason;
    // The playback session this update belongs to.
    public final String sessionId;
    public PendingFormatUpdate(
        Format format, @C.SelectionReason int selectionReason, String sessionId) {
      this.format = format;
      this.selectionReason = selectionReason;
      this.sessionId = sessionId;
    }
  }
}
|
googleapis/google-api-java-client-services | 38,274 | clients/google-api-services-drive/v2/1.31.0/com/google/api/services/drive/model/TeamDrive.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.drive.model;
/**
* Deprecated: use the drive collection instead.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Drive API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class TeamDrive extends com.google.api.client.json.GenericJson {
/**
* An image file and cropping parameters from which a background image for this Team Drive is set.
* This is a write only field; it can only be set on drive.teamdrives.update requests that don't
* set themeId. When specified, all fields of the backgroundImageFile must be set.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private BackgroundImageFile backgroundImageFile;
/**
* A short-lived link to this Team Drive's background image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String backgroundImageLink;
/**
* Capabilities the current user has on this Team Drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Capabilities capabilities;
/**
* The color of this Team Drive as an RGB hex string. It can only be set on a
* drive.teamdrives.update request that does not set themeId.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String colorRgb;
/**
* The time at which the Team Drive was created (RFC 3339 date-time).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private com.google.api.client.util.DateTime createdDate;
/**
* The ID of this Team Drive which is also the ID of the top level folder of this Team Drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String id;
/**
* This is always drive#teamDrive
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* The name of this Team Drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* The organizational unit of this shared drive. This field is only populated on drives.list
* responses when the useDomainAdminAccess parameter is set to true.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String orgUnitId;
/**
* A set of restrictions that apply to this Team Drive or items inside this Team Drive.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private Restrictions restrictions;
/**
* The ID of the theme from which the background image and color will be set. The set of possible
* teamDriveThemes can be retrieved from a drive.about.get response. When not specified on a
* drive.teamdrives.insert request, a random theme is chosen from which the background image and
* color are set. This is a write-only field; it can only be set on requests that don't set
* colorRgb or backgroundImageFile.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String themeId;
/**
* An image file and cropping parameters from which a background image for this Team Drive is set.
* This is a write only field; it can only be set on drive.teamdrives.update requests that don't
* set themeId. When specified, all fields of the backgroundImageFile must be set.
* @return value or {@code null} for none
*/
public BackgroundImageFile getBackgroundImageFile() {
return backgroundImageFile;
}
/**
* An image file and cropping parameters from which a background image for this Team Drive is set.
* This is a write only field; it can only be set on drive.teamdrives.update requests that don't
* set themeId. When specified, all fields of the backgroundImageFile must be set.
* @param backgroundImageFile backgroundImageFile or {@code null} for none
*/
public TeamDrive setBackgroundImageFile(BackgroundImageFile backgroundImageFile) {
this.backgroundImageFile = backgroundImageFile;
return this;
}
/**
* A short-lived link to this Team Drive's background image.
* @return value or {@code null} for none
*/
public java.lang.String getBackgroundImageLink() {
return backgroundImageLink;
}
/**
* A short-lived link to this Team Drive's background image.
* @param backgroundImageLink backgroundImageLink or {@code null} for none
*/
public TeamDrive setBackgroundImageLink(java.lang.String backgroundImageLink) {
this.backgroundImageLink = backgroundImageLink;
return this;
}
/**
* Capabilities the current user has on this Team Drive.
* @return value or {@code null} for none
*/
public Capabilities getCapabilities() {
return capabilities;
}
/**
* Capabilities the current user has on this Team Drive.
* @param capabilities capabilities or {@code null} for none
*/
public TeamDrive setCapabilities(Capabilities capabilities) {
this.capabilities = capabilities;
return this;
}
/**
* The color of this Team Drive as an RGB hex string. It can only be set on a
* drive.teamdrives.update request that does not set themeId.
* @return value or {@code null} for none
*/
public java.lang.String getColorRgb() {
return colorRgb;
}
/**
* The color of this Team Drive as an RGB hex string. It can only be set on a
* drive.teamdrives.update request that does not set themeId.
* @param colorRgb colorRgb or {@code null} for none
*/
public TeamDrive setColorRgb(java.lang.String colorRgb) {
this.colorRgb = colorRgb;
return this;
}
/**
* The time at which the Team Drive was created (RFC 3339 date-time).
* @return value or {@code null} for none
*/
public com.google.api.client.util.DateTime getCreatedDate() {
return createdDate;
}
/**
* The time at which the Team Drive was created (RFC 3339 date-time).
* @param createdDate createdDate or {@code null} for none
*/
public TeamDrive setCreatedDate(com.google.api.client.util.DateTime createdDate) {
this.createdDate = createdDate;
return this;
}
/**
* The ID of this Team Drive which is also the ID of the top level folder of this Team Drive.
* @return value or {@code null} for none
*/
public java.lang.String getId() {
return id;
}
/**
* The ID of this Team Drive which is also the ID of the top level folder of this Team Drive.
* @param id id or {@code null} for none
*/
public TeamDrive setId(java.lang.String id) {
this.id = id;
return this;
}
/**
* This is always drive#teamDrive
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* This is always drive#teamDrive
* @param kind kind or {@code null} for none
*/
public TeamDrive setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* The name of this Team Drive.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* The name of this Team Drive.
* @param name name or {@code null} for none
*/
public TeamDrive setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* The organizational unit of this shared drive. This field is only populated on drives.list
* responses when the useDomainAdminAccess parameter is set to true.
* @return value or {@code null} for none
*/
public java.lang.String getOrgUnitId() {
return orgUnitId;
}
/**
* The organizational unit of this shared drive. This field is only populated on drives.list
* responses when the useDomainAdminAccess parameter is set to true.
* @param orgUnitId orgUnitId or {@code null} for none
*/
public TeamDrive setOrgUnitId(java.lang.String orgUnitId) {
this.orgUnitId = orgUnitId;
return this;
}
/**
* A set of restrictions that apply to this Team Drive or items inside this Team Drive.
* @return value or {@code null} for none
*/
public Restrictions getRestrictions() {
return restrictions;
}
  /**
   * A set of restrictions that apply to this Team Drive or items inside this Team Drive.
   * @param restrictions restrictions or {@code null} for none
   */
  public TeamDrive setRestrictions(Restrictions restrictions) {
    // Fluent setter: returns this instance for call chaining.
    this.restrictions = restrictions;
    return this;
  }
  /**
   * The ID of the theme from which the background image and color will be set. The set of possible
   * teamDriveThemes can be retrieved from a drive.about.get response. When not specified on a
   * drive.teamdrives.insert request, a random theme is chosen from which the background image and
   * color are set. This is a write-only field; it can only be set on requests that don't set
   * colorRgb or backgroundImageFile.
   * @return value or {@code null} for none
   */
  public java.lang.String getThemeId() {
    // Write-only on the wire (see Javadoc): responses generally leave this null.
    return themeId;
  }
  /**
   * The ID of the theme from which the background image and color will be set. The set of possible
   * teamDriveThemes can be retrieved from a drive.about.get response. When not specified on a
   * drive.teamdrives.insert request, a random theme is chosen from which the background image and
   * color are set. This is a write-only field; it can only be set on requests that don't set
   * colorRgb or backgroundImageFile.
   * @param themeId themeId or {@code null} for none
   */
  public TeamDrive setThemeId(java.lang.String themeId) {
    // Fluent setter: returns this instance for call chaining.
    this.themeId = themeId;
    return this;
  }
  @Override
  public TeamDrive set(String fieldName, Object value) {
    // Covariant override of GenericJson.set so chained calls keep the TeamDrive type.
    return (TeamDrive) super.set(fieldName, value);
  }
  @Override
  public TeamDrive clone() {
    // Covariant override narrowing GenericJson.clone()'s return type to TeamDrive.
    return (TeamDrive) super.clone();
  }
  /**
   * An image file and cropping parameters from which a background image for this Team Drive is set.
   * This is a write only field; it can only be set on drive.teamdrives.update requests that don't set
   * themeId. When specified, all fields of the backgroundImageFile must be set.
   *
   * <p>Generated JSON model: fields are bound to the wire format via {@code @Key}; every field is
   * nullable, where {@code null} means "not set" (see the per-field Javadoc).
   */
  public static final class BackgroundImageFile extends com.google.api.client.json.GenericJson {
    /**
     * The ID of an image file in Drive to use for the background image.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String id;
    /**
     * The width of the cropped image in the closed range of 0 to 1. This value represents the width
     * of the cropped image divided by the width of the entire image. The height is computed by
     * applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
     * pixels wide and 144 pixels high.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Float width;
    /**
     * The X coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the horizontal distance from the
     * left side of the entire image to the left side of the cropping area divided by the width of the
     * entire image.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Float xCoordinate;
    /**
     * The Y coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the vertical distance from the top
     * side of the entire image to the top side of the cropping area divided by the height of the
     * entire image.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Float yCoordinate;
    /**
     * The ID of an image file in Drive to use for the background image.
     * @return value or {@code null} for none
     */
    public java.lang.String getId() {
      return id;
    }
    /**
     * The ID of an image file in Drive to use for the background image.
     * @param id id or {@code null} for none
     */
    public BackgroundImageFile setId(java.lang.String id) {
      this.id = id;
      return this;
    }
    /**
     * The width of the cropped image in the closed range of 0 to 1. This value represents the width
     * of the cropped image divided by the width of the entire image. The height is computed by
     * applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
     * pixels wide and 144 pixels high.
     * @return value or {@code null} for none
     */
    public java.lang.Float getWidth() {
      return width;
    }
    /**
     * The width of the cropped image in the closed range of 0 to 1. This value represents the width
     * of the cropped image divided by the width of the entire image. The height is computed by
     * applying a width to height aspect ratio of 80 to 9. The resulting image must be at least 1280
     * pixels wide and 144 pixels high.
     * @param width width or {@code null} for none
     */
    public BackgroundImageFile setWidth(java.lang.Float width) {
      this.width = width;
      return this;
    }
    /**
     * The X coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the horizontal distance from the
     * left side of the entire image to the left side of the cropping area divided by the width of the
     * entire image.
     * @return value or {@code null} for none
     */
    public java.lang.Float getXCoordinate() {
      return xCoordinate;
    }
    /**
     * The X coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the horizontal distance from the
     * left side of the entire image to the left side of the cropping area divided by the width of the
     * entire image.
     * @param xCoordinate xCoordinate or {@code null} for none
     */
    public BackgroundImageFile setXCoordinate(java.lang.Float xCoordinate) {
      this.xCoordinate = xCoordinate;
      return this;
    }
    /**
     * The Y coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the vertical distance from the top
     * side of the entire image to the top side of the cropping area divided by the height of the
     * entire image.
     * @return value or {@code null} for none
     */
    public java.lang.Float getYCoordinate() {
      return yCoordinate;
    }
    /**
     * The Y coordinate of the upper left corner of the cropping area in the background image. This is
     * a value in the closed range of 0 to 1. This value represents the vertical distance from the top
     * side of the entire image to the top side of the cropping area divided by the height of the
     * entire image.
     * @param yCoordinate yCoordinate or {@code null} for none
     */
    public BackgroundImageFile setYCoordinate(java.lang.Float yCoordinate) {
      this.yCoordinate = yCoordinate;
      return this;
    }
    @Override
    public BackgroundImageFile set(String fieldName, Object value) {
      // Covariant override so chained calls keep the BackgroundImageFile type.
      return (BackgroundImageFile) super.set(fieldName, value);
    }
    @Override
    public BackgroundImageFile clone() {
      // Covariant override narrowing GenericJson.clone()'s return type.
      return (BackgroundImageFile) super.clone();
    }
  }
  /**
   * Capabilities the current user has on this Team Drive.
   *
   * <p>Generated JSON model: each capability is an optional {@code Boolean} bound via {@code @Key};
   * a {@code null} value means the server did not populate the field (see the per-field Javadoc).
   */
  public static final class Capabilities extends com.google.api.client.json.GenericJson {
    /**
     * Whether the current user can add children to folders in this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canAddChildren;
    /**
     * Whether the current user can change the copyRequiresWriterPermission restriction of this Team
     * Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction;
    /**
     * Whether the current user can change the domainUsersOnly restriction of this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeDomainUsersOnlyRestriction;
    /**
     * Whether the current user can change the background of this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeTeamDriveBackground;
    /**
     * Whether the current user can change the teamMembersOnly restriction of this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canChangeTeamMembersOnlyRestriction;
    /**
     * Whether the current user can comment on files in this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canComment;
    /**
     * Whether the current user can copy files in this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canCopy;
    /**
     * Whether the current user can delete children from folders in this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canDeleteChildren;
    /**
     * Whether the current user can delete this Team Drive. Attempting to delete the Team Drive may
     * still fail if there are untrashed items inside the Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canDeleteTeamDrive;
    /**
     * Whether the current user can download files in this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canDownload;
    /**
     * Whether the current user can edit files in this Team Drive
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canEdit;
    /**
     * Whether the current user can list the children of folders in this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canListChildren;
    /**
     * Whether the current user can add members to this Team Drive or remove them or change their
     * role.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canManageMembers;
    /**
     * Whether the current user can read the revisions resource of files in this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canReadRevisions;
    /**
     * Deprecated - use canDeleteChildren or canTrashChildren instead.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canRemoveChildren;
    /**
     * Whether the current user can rename files or folders in this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canRename;
    /**
     * Whether the current user can rename this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canRenameTeamDrive;
    /**
     * Whether the current user can reset the Team Drive restrictions to defaults.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canResetTeamDriveRestrictions;
    /**
     * Whether the current user can share files or folders in this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canShare;
    /**
     * Whether the current user can trash children from folders in this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean canTrashChildren;
    /**
     * Whether the current user can add children to folders in this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanAddChildren() {
      return canAddChildren;
    }
    /**
     * Whether the current user can add children to folders in this Team Drive.
     * @param canAddChildren canAddChildren or {@code null} for none
     */
    public Capabilities setCanAddChildren(java.lang.Boolean canAddChildren) {
      this.canAddChildren = canAddChildren;
      return this;
    }
    /**
     * Whether the current user can change the copyRequiresWriterPermission restriction of this Team
     * Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeCopyRequiresWriterPermissionRestriction() {
      return canChangeCopyRequiresWriterPermissionRestriction;
    }
    /**
     * Whether the current user can change the copyRequiresWriterPermission restriction of this Team
     * Drive.
     * @param canChangeCopyRequiresWriterPermissionRestriction canChangeCopyRequiresWriterPermissionRestriction or {@code null} for none
     */
    public Capabilities setCanChangeCopyRequiresWriterPermissionRestriction(java.lang.Boolean canChangeCopyRequiresWriterPermissionRestriction) {
      this.canChangeCopyRequiresWriterPermissionRestriction = canChangeCopyRequiresWriterPermissionRestriction;
      return this;
    }
    /**
     * Whether the current user can change the domainUsersOnly restriction of this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeDomainUsersOnlyRestriction() {
      return canChangeDomainUsersOnlyRestriction;
    }
    /**
     * Whether the current user can change the domainUsersOnly restriction of this Team Drive.
     * @param canChangeDomainUsersOnlyRestriction canChangeDomainUsersOnlyRestriction or {@code null} for none
     */
    public Capabilities setCanChangeDomainUsersOnlyRestriction(java.lang.Boolean canChangeDomainUsersOnlyRestriction) {
      this.canChangeDomainUsersOnlyRestriction = canChangeDomainUsersOnlyRestriction;
      return this;
    }
    /**
     * Whether the current user can change the background of this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeTeamDriveBackground() {
      return canChangeTeamDriveBackground;
    }
    /**
     * Whether the current user can change the background of this Team Drive.
     * @param canChangeTeamDriveBackground canChangeTeamDriveBackground or {@code null} for none
     */
    public Capabilities setCanChangeTeamDriveBackground(java.lang.Boolean canChangeTeamDriveBackground) {
      this.canChangeTeamDriveBackground = canChangeTeamDriveBackground;
      return this;
    }
    /**
     * Whether the current user can change the teamMembersOnly restriction of this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanChangeTeamMembersOnlyRestriction() {
      return canChangeTeamMembersOnlyRestriction;
    }
    /**
     * Whether the current user can change the teamMembersOnly restriction of this Team Drive.
     * @param canChangeTeamMembersOnlyRestriction canChangeTeamMembersOnlyRestriction or {@code null} for none
     */
    public Capabilities setCanChangeTeamMembersOnlyRestriction(java.lang.Boolean canChangeTeamMembersOnlyRestriction) {
      this.canChangeTeamMembersOnlyRestriction = canChangeTeamMembersOnlyRestriction;
      return this;
    }
    /**
     * Whether the current user can comment on files in this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanComment() {
      return canComment;
    }
    /**
     * Whether the current user can comment on files in this Team Drive.
     * @param canComment canComment or {@code null} for none
     */
    public Capabilities setCanComment(java.lang.Boolean canComment) {
      this.canComment = canComment;
      return this;
    }
    /**
     * Whether the current user can copy files in this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanCopy() {
      return canCopy;
    }
    /**
     * Whether the current user can copy files in this Team Drive.
     * @param canCopy canCopy or {@code null} for none
     */
    public Capabilities setCanCopy(java.lang.Boolean canCopy) {
      this.canCopy = canCopy;
      return this;
    }
    /**
     * Whether the current user can delete children from folders in this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanDeleteChildren() {
      return canDeleteChildren;
    }
    /**
     * Whether the current user can delete children from folders in this Team Drive.
     * @param canDeleteChildren canDeleteChildren or {@code null} for none
     */
    public Capabilities setCanDeleteChildren(java.lang.Boolean canDeleteChildren) {
      this.canDeleteChildren = canDeleteChildren;
      return this;
    }
    /**
     * Whether the current user can delete this Team Drive. Attempting to delete the Team Drive may
     * still fail if there are untrashed items inside the Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanDeleteTeamDrive() {
      return canDeleteTeamDrive;
    }
    /**
     * Whether the current user can delete this Team Drive. Attempting to delete the Team Drive may
     * still fail if there are untrashed items inside the Team Drive.
     * @param canDeleteTeamDrive canDeleteTeamDrive or {@code null} for none
     */
    public Capabilities setCanDeleteTeamDrive(java.lang.Boolean canDeleteTeamDrive) {
      this.canDeleteTeamDrive = canDeleteTeamDrive;
      return this;
    }
    /**
     * Whether the current user can download files in this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanDownload() {
      return canDownload;
    }
    /**
     * Whether the current user can download files in this Team Drive.
     * @param canDownload canDownload or {@code null} for none
     */
    public Capabilities setCanDownload(java.lang.Boolean canDownload) {
      this.canDownload = canDownload;
      return this;
    }
    /**
     * Whether the current user can edit files in this Team Drive
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanEdit() {
      return canEdit;
    }
    /**
     * Whether the current user can edit files in this Team Drive
     * @param canEdit canEdit or {@code null} for none
     */
    public Capabilities setCanEdit(java.lang.Boolean canEdit) {
      this.canEdit = canEdit;
      return this;
    }
    /**
     * Whether the current user can list the children of folders in this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanListChildren() {
      return canListChildren;
    }
    /**
     * Whether the current user can list the children of folders in this Team Drive.
     * @param canListChildren canListChildren or {@code null} for none
     */
    public Capabilities setCanListChildren(java.lang.Boolean canListChildren) {
      this.canListChildren = canListChildren;
      return this;
    }
    /**
     * Whether the current user can add members to this Team Drive or remove them or change their
     * role.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanManageMembers() {
      return canManageMembers;
    }
    /**
     * Whether the current user can add members to this Team Drive or remove them or change their
     * role.
     * @param canManageMembers canManageMembers or {@code null} for none
     */
    public Capabilities setCanManageMembers(java.lang.Boolean canManageMembers) {
      this.canManageMembers = canManageMembers;
      return this;
    }
    /**
     * Whether the current user can read the revisions resource of files in this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanReadRevisions() {
      return canReadRevisions;
    }
    /**
     * Whether the current user can read the revisions resource of files in this Team Drive.
     * @param canReadRevisions canReadRevisions or {@code null} for none
     */
    public Capabilities setCanReadRevisions(java.lang.Boolean canReadRevisions) {
      this.canReadRevisions = canReadRevisions;
      return this;
    }
    /**
     * Deprecated - use canDeleteChildren or canTrashChildren instead.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanRemoveChildren() {
      return canRemoveChildren;
    }
    /**
     * Deprecated - use canDeleteChildren or canTrashChildren instead.
     * @param canRemoveChildren canRemoveChildren or {@code null} for none
     */
    public Capabilities setCanRemoveChildren(java.lang.Boolean canRemoveChildren) {
      this.canRemoveChildren = canRemoveChildren;
      return this;
    }
    /**
     * Whether the current user can rename files or folders in this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanRename() {
      return canRename;
    }
    /**
     * Whether the current user can rename files or folders in this Team Drive.
     * @param canRename canRename or {@code null} for none
     */
    public Capabilities setCanRename(java.lang.Boolean canRename) {
      this.canRename = canRename;
      return this;
    }
    /**
     * Whether the current user can rename this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanRenameTeamDrive() {
      return canRenameTeamDrive;
    }
    /**
     * Whether the current user can rename this Team Drive.
     * @param canRenameTeamDrive canRenameTeamDrive or {@code null} for none
     */
    public Capabilities setCanRenameTeamDrive(java.lang.Boolean canRenameTeamDrive) {
      this.canRenameTeamDrive = canRenameTeamDrive;
      return this;
    }
    /**
     * Whether the current user can reset the Team Drive restrictions to defaults.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanResetTeamDriveRestrictions() {
      return canResetTeamDriveRestrictions;
    }
    /**
     * Whether the current user can reset the Team Drive restrictions to defaults.
     * @param canResetTeamDriveRestrictions canResetTeamDriveRestrictions or {@code null} for none
     */
    public Capabilities setCanResetTeamDriveRestrictions(java.lang.Boolean canResetTeamDriveRestrictions) {
      this.canResetTeamDriveRestrictions = canResetTeamDriveRestrictions;
      return this;
    }
    /**
     * Whether the current user can share files or folders in this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanShare() {
      return canShare;
    }
    /**
     * Whether the current user can share files or folders in this Team Drive.
     * @param canShare canShare or {@code null} for none
     */
    public Capabilities setCanShare(java.lang.Boolean canShare) {
      this.canShare = canShare;
      return this;
    }
    /**
     * Whether the current user can trash children from folders in this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCanTrashChildren() {
      return canTrashChildren;
    }
    /**
     * Whether the current user can trash children from folders in this Team Drive.
     * @param canTrashChildren canTrashChildren or {@code null} for none
     */
    public Capabilities setCanTrashChildren(java.lang.Boolean canTrashChildren) {
      this.canTrashChildren = canTrashChildren;
      return this;
    }
    @Override
    public Capabilities set(String fieldName, Object value) {
      // Covariant override so chained calls keep the Capabilities type.
      return (Capabilities) super.set(fieldName, value);
    }
    @Override
    public Capabilities clone() {
      // Covariant override narrowing GenericJson.clone()'s return type.
      return (Capabilities) super.clone();
    }
  }
  /**
   * A set of restrictions that apply to this Team Drive or items inside this Team Drive.
   *
   * <p>Generated JSON model: each restriction is an optional {@code Boolean} bound via
   * {@code @Key}; a {@code null} value means the field was not populated.
   */
  public static final class Restrictions extends com.google.api.client.json.GenericJson {
    /**
     * Whether administrative privileges on this Team Drive are required to modify restrictions.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean adminManagedRestrictions;
    /**
     * Whether the options to copy, print, or download files inside this Team Drive, should be
     * disabled for readers and commenters. When this restriction is set to true, it will override the
     * similarly named field to true for any file inside this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean copyRequiresWriterPermission;
    /**
     * Whether access to this Team Drive and items inside this Team Drive is restricted to users of
     * the domain to which this Team Drive belongs. This restriction may be overridden by other
     * sharing policies controlled outside of this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean domainUsersOnly;
    /**
     * Whether access to items inside this Team Drive is restricted to members of this Team Drive.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean teamMembersOnly;
    /**
     * Whether administrative privileges on this Team Drive are required to modify restrictions.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getAdminManagedRestrictions() {
      return adminManagedRestrictions;
    }
    /**
     * Whether administrative privileges on this Team Drive are required to modify restrictions.
     * @param adminManagedRestrictions adminManagedRestrictions or {@code null} for none
     */
    public Restrictions setAdminManagedRestrictions(java.lang.Boolean adminManagedRestrictions) {
      this.adminManagedRestrictions = adminManagedRestrictions;
      return this;
    }
    /**
     * Whether the options to copy, print, or download files inside this Team Drive, should be
     * disabled for readers and commenters. When this restriction is set to true, it will override the
     * similarly named field to true for any file inside this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getCopyRequiresWriterPermission() {
      return copyRequiresWriterPermission;
    }
    /**
     * Whether the options to copy, print, or download files inside this Team Drive, should be
     * disabled for readers and commenters. When this restriction is set to true, it will override the
     * similarly named field to true for any file inside this Team Drive.
     * @param copyRequiresWriterPermission copyRequiresWriterPermission or {@code null} for none
     */
    public Restrictions setCopyRequiresWriterPermission(java.lang.Boolean copyRequiresWriterPermission) {
      this.copyRequiresWriterPermission = copyRequiresWriterPermission;
      return this;
    }
    /**
     * Whether access to this Team Drive and items inside this Team Drive is restricted to users of
     * the domain to which this Team Drive belongs. This restriction may be overridden by other
     * sharing policies controlled outside of this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getDomainUsersOnly() {
      return domainUsersOnly;
    }
    /**
     * Whether access to this Team Drive and items inside this Team Drive is restricted to users of
     * the domain to which this Team Drive belongs. This restriction may be overridden by other
     * sharing policies controlled outside of this Team Drive.
     * @param domainUsersOnly domainUsersOnly or {@code null} for none
     */
    public Restrictions setDomainUsersOnly(java.lang.Boolean domainUsersOnly) {
      this.domainUsersOnly = domainUsersOnly;
      return this;
    }
    /**
     * Whether access to items inside this Team Drive is restricted to members of this Team Drive.
     * @return value or {@code null} for none
     */
    public java.lang.Boolean getTeamMembersOnly() {
      return teamMembersOnly;
    }
    /**
     * Whether access to items inside this Team Drive is restricted to members of this Team Drive.
     * @param teamMembersOnly teamMembersOnly or {@code null} for none
     */
    public Restrictions setTeamMembersOnly(java.lang.Boolean teamMembersOnly) {
      this.teamMembersOnly = teamMembersOnly;
      return this;
    }
    @Override
    public Restrictions set(String fieldName, Object value) {
      // Covariant override so chained calls keep the Restrictions type.
      return (Restrictions) super.set(fieldName, value);
    }
    @Override
    public Restrictions clone() {
      // Covariant override narrowing GenericJson.clone()'s return type.
      return (Restrictions) super.clone();
    }
  }
}
|
googleapis/sdk-platform-java | 38,029 | java-common-protos/proto-google-common-protos/src/main/java/com/google/type/Expr.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/type/expr.proto
// Protobuf Java Version: 3.25.8
package com.google.type;
/**
*
*
* <pre>
* Represents a textual expression in the Common Expression Language (CEL)
* syntax. CEL is a C-like expression language. The syntax and semantics of CEL
* are documented at https://github.com/google/cel-spec.
*
* Example (Comparison):
*
* title: "Summary size limit"
* description: "Determines if a summary is less than 100 chars"
* expression: "document.summary.size() < 100"
*
* Example (Equality):
*
* title: "Requestor is owner"
* description: "Determines if requestor is the document owner"
* expression: "document.owner == request.auth.claims.email"
*
* Example (Logic):
*
* title: "Public documents"
* description: "Determine whether the document should be publicly visible"
* expression: "document.type != 'private' && document.type != 'internal'"
*
* Example (Data Manipulation):
*
* title: "Notification string"
* description: "Create a notification string with a timestamp."
* expression: "'New message received at ' + string(document.create_time)"
*
* The exact variables and functions that may be referenced within an expression
* are determined by the service that evaluates it. See the service
* documentation for additional information.
* </pre>
*
* Protobuf type {@code google.type.Expr}
*/
public final class Expr extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.type.Expr)
ExprOrBuilder {
private static final long serialVersionUID = 0L;
  // Use Expr.newBuilder() to construct.
  private Expr(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: all four string fields start as the empty string,
  // matching proto3 default semantics for absent string fields.
  private Expr() {
    expression_ = "";
    title_ = "";
    description_ = "";
    location_ = "";
  }
  // Called reflectively by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new Expr();
  }
  // Static accessor for this message type's descriptor, generated in ExprProto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.type.ExprProto.internal_static_google_type_Expr_descriptor;
  }
  // Wires the generated field-accessor table to the Expr message and builder classes
  // so the protobuf runtime can access fields reflectively.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.type.ExprProto.internal_static_google_type_Expr_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.type.Expr.class, com.google.type.Expr.Builder.class);
  }
  public static final int EXPRESSION_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private volatile java.lang.Object expression_ = "";
  /**
   *
   *
   * <pre>
   * Textual representation of an expression in Common Expression Language
   * syntax.
   * </pre>
   *
   * <code>string expression = 1;</code>
   *
   * @return The expression.
   */
  @java.lang.Override
  public java.lang.String getExpression() {
    java.lang.Object ref = expression_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field still holds the lazily-decoded ByteString form: decode it as UTF-8 and
      // write the String back into the volatile field so later calls skip the decode.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      expression_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Textual representation of an expression in Common Expression Language
   * syntax.
   * </pre>
   *
   * <code>string expression = 1;</code>
   *
   * @return The bytes for expression.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getExpressionBytes() {
    java.lang.Object ref = expression_;
    if (ref instanceof java.lang.String) {
      // Field currently holds a String: encode to UTF-8 bytes and cache the
      // ByteString back into the volatile field for subsequent calls.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      expression_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int TITLE_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private volatile java.lang.Object title_ = "";
  /**
   *
   *
   * <pre>
   * Optional. Title for the expression, i.e. a short string describing
   * its purpose. This can be used e.g. in UIs which allow to enter the
   * expression.
   * </pre>
   *
   * <code>string title = 2;</code>
   *
   * @return The title.
   */
  @java.lang.Override
  public java.lang.String getTitle() {
    java.lang.Object ref = title_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the cached ByteString as UTF-8 and memoize the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      title_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Title for the expression, i.e. a short string describing
   * its purpose. This can be used e.g. in UIs which allow to enter the
   * expression.
   * </pre>
   *
   * <code>string title = 2;</code>
   *
   * @return The bytes for title.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getTitleBytes() {
    java.lang.Object ref = title_;
    if (ref instanceof java.lang.String) {
      // Encode the String as UTF-8 and memoize the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      title_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int DESCRIPTION_FIELD_NUMBER = 3;
  @SuppressWarnings("serial")
  private volatile java.lang.Object description_ = "";
  /**
   *
   *
   * <pre>
   * Optional. Description of the expression. This is a longer text which
   * describes the expression, e.g. when hovered over it in a UI.
   * </pre>
   *
   * <code>string description = 3;</code>
   *
   * @return The description.
   */
  @java.lang.Override
  public java.lang.String getDescription() {
    java.lang.Object ref = description_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the cached ByteString as UTF-8 and memoize the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      description_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Description of the expression. This is a longer text which
   * describes the expression, e.g. when hovered over it in a UI.
   * </pre>
   *
   * <code>string description = 3;</code>
   *
   * @return The bytes for description.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getDescriptionBytes() {
    java.lang.Object ref = description_;
    if (ref instanceof java.lang.String) {
      // Encode the String as UTF-8 and memoize the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      description_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int LOCATION_FIELD_NUMBER = 4;

  // Holds either a String or a ByteString; decoded lazily (standard generated pattern).
  @SuppressWarnings("serial")
  private volatile java.lang.Object location_ = "";
  /**
   *
   *
   * <pre>
   * Optional. String indicating the location of the expression for error
   * reporting, e.g. a file name and a position in the file.
   * </pre>
   *
   * <code>string location = 4;</code>
   *
   * @return The location.
   */
  @java.lang.Override
  public java.lang.String getLocation() {
    java.lang.Object ref = location_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      location_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. String indicating the location of the expression for error
   * reporting, e.g. a file name and a position in the file.
   * </pre>
   *
   * <code>string location = 4;</code>
   *
   * @return The bytes for location.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getLocationBytes() {
    java.lang.Object ref = location_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      location_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  // Every field of this message is an optional scalar, so the message is always
  // initialized; the result is memoized after the first call.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the message; per proto3 semantics, string fields equal to the
  // default ("") are skipped entirely on the wire.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(expression_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, expression_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(title_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, title_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, description_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(location_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, location_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size of this message, mirroring writeTo's field-skipping
  // logic; the result is cached in memoizedSize (-1 means not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(expression_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, expression_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(title_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, title_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, description_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(location_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, location_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality over all four string fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.type.Expr)) {
      return super.equals(obj);
    }
    com.google.type.Expr other = (com.google.type.Expr) obj;

    if (!getExpression().equals(other.getExpression())) return false;
    if (!getTitle().equals(other.getTitle())) return false;
    if (!getDescription().equals(other.getDescription())) return false;
    if (!getLocation().equals(other.getLocation())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash code consistent with equals(): folds in the descriptor and each field
  // keyed by its field number. Memoized (0 means not yet computed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + EXPRESSION_FIELD_NUMBER;
    hash = (53 * hash) + getExpression().hashCode();
    hash = (37 * hash) + TITLE_FIELD_NUMBER;
    hash = (53 * hash) + getTitle().hashCode();
    hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
    hash = (53 * hash) + getDescription().hashCode();
    hash = (37 * hash) + LOCATION_FIELD_NUMBER;
    hash = (53 * hash) + getLocation().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Generated parseFrom/parseDelimitedFrom overloads: each delegates to the
  // singleton PARSER (or the GeneratedMessageV3 stream helpers) for one of the
  // supported input types, with and without an ExtensionRegistry.
  public static com.google.type.Expr parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.type.Expr parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.type.Expr parseFrom(com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.type.Expr parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.type.Expr parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.type.Expr parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.type.Expr parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.type.Expr parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.type.Expr parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.type.Expr parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.type.Expr parseFrom(com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.type.Expr parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: newBuilder() starts from the default instance,
  // newBuilder(prototype) starts pre-populated, and toBuilder() avoids a copy
  // when this is already the default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.type.Expr prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Represents a textual expression in the Common Expression Language (CEL)
   * syntax. CEL is a C-like expression language. The syntax and semantics of CEL
   * are documented at https://github.com/google/cel-spec.
   *
   * Example (Comparison):
   *
   *     title: "Summary size limit"
   *     description: "Determines if a summary is less than 100 chars"
   *     expression: "document.summary.size() &lt; 100"
   *
   * Example (Equality):
   *
   *     title: "Requestor is owner"
   *     description: "Determines if requestor is the document owner"
   *     expression: "document.owner == request.auth.claims.email"
   *
   * Example (Logic):
   *
   *     title: "Public documents"
   *     description: "Determine whether the document should be publicly visible"
   *     expression: "document.type != 'private' &amp;&amp; document.type != 'internal'"
   *
   * Example (Data Manipulation):
   *
   *     title: "Notification string"
   *     description: "Create a notification string with a timestamp."
   *     expression: "'New message received at ' + string(document.create_time)"
   *
   * The exact variables and functions that may be referenced within an expression
   * are determined by the service that evaluates it. See the service
   * documentation for additional information.
   * </pre>
   *
   * Protobuf type {@code google.type.Expr}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.type.Expr)
      com.google.type.ExprOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.type.ExprProto.internal_static_google_type_Expr_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.type.ExprProto.internal_static_google_type_Expr_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.type.Expr.class, com.google.type.Expr.Builder.class);
    }

    // Construct using com.google.type.Expr.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets all fields to their proto3 defaults and clears the has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      expression_ = "";
      title_ = "";
      description_ = "";
      location_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.type.ExprProto.internal_static_google_type_Expr_descriptor;
    }

    @java.lang.Override
    public com.google.type.Expr getDefaultInstanceForType() {
      return com.google.type.Expr.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.type.Expr build() {
      com.google.type.Expr result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.type.Expr buildPartial() {
      com.google.type.Expr result = new com.google.type.Expr(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose corresponding bits are set in bitField0_.
    private void buildPartial0(com.google.type.Expr result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.expression_ = expression_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.title_ = title_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.description_ = description_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.location_ = location_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.type.Expr) {
        return mergeFrom((com.google.type.Expr) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merge semantics: a non-empty string field in `other` overwrites this
    // builder's value; empty fields in `other` leave this builder untouched.
    public Builder mergeFrom(com.google.type.Expr other) {
      if (other == com.google.type.Expr.getDefaultInstance()) return this;
      if (!other.getExpression().isEmpty()) {
        expression_ = other.expression_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getTitle().isEmpty()) {
        title_ = other.title_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getDescription().isEmpty()) {
        description_ = other.description_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getLocation().isEmpty()) {
        location_ = other.location_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parser: each case value is a field tag
    // ((field_number << 3) | wire_type), e.g. 10 == field 1, length-delimited.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                expression_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                title_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                description_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                location_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object expression_ = "";
    /**
     *
     *
     * <pre>
     * Textual representation of an expression in Common Expression Language
     * syntax.
     * </pre>
     *
     * <code>string expression = 1;</code>
     *
     * @return The expression.
     */
    public java.lang.String getExpression() {
      java.lang.Object ref = expression_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        expression_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Textual representation of an expression in Common Expression Language
     * syntax.
     * </pre>
     *
     * <code>string expression = 1;</code>
     *
     * @return The bytes for expression.
     */
    public com.google.protobuf.ByteString getExpressionBytes() {
      java.lang.Object ref = expression_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        expression_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Textual representation of an expression in Common Expression Language
     * syntax.
     * </pre>
     *
     * <code>string expression = 1;</code>
     *
     * @param value The expression to set.
     * @return This builder for chaining.
     */
    public Builder setExpression(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      expression_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Textual representation of an expression in Common Expression Language
     * syntax.
     * </pre>
     *
     * <code>string expression = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearExpression() {
      expression_ = getDefaultInstance().getExpression();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Textual representation of an expression in Common Expression Language
     * syntax.
     * </pre>
     *
     * <code>string expression = 1;</code>
     *
     * @param value The bytes for expression to set.
     * @return This builder for chaining.
     */
    public Builder setExpressionBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      expression_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object title_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Title for the expression, i.e. a short string describing
     * its purpose. This can be used e.g. in UIs which allow to enter the
     * expression.
     * </pre>
     *
     * <code>string title = 2;</code>
     *
     * @return The title.
     */
    public java.lang.String getTitle() {
      java.lang.Object ref = title_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        title_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Title for the expression, i.e. a short string describing
     * its purpose. This can be used e.g. in UIs which allow to enter the
     * expression.
     * </pre>
     *
     * <code>string title = 2;</code>
     *
     * @return The bytes for title.
     */
    public com.google.protobuf.ByteString getTitleBytes() {
      java.lang.Object ref = title_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        title_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Title for the expression, i.e. a short string describing
     * its purpose. This can be used e.g. in UIs which allow to enter the
     * expression.
     * </pre>
     *
     * <code>string title = 2;</code>
     *
     * @param value The title to set.
     * @return This builder for chaining.
     */
    public Builder setTitle(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      title_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Title for the expression, i.e. a short string describing
     * its purpose. This can be used e.g. in UIs which allow to enter the
     * expression.
     * </pre>
     *
     * <code>string title = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTitle() {
      title_ = getDefaultInstance().getTitle();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Title for the expression, i.e. a short string describing
     * its purpose. This can be used e.g. in UIs which allow to enter the
     * expression.
     * </pre>
     *
     * <code>string title = 2;</code>
     *
     * @param value The bytes for title to set.
     * @return This builder for chaining.
     */
    public Builder setTitleBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      title_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private java.lang.Object description_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Description of the expression. This is a longer text which
     * describes the expression, e.g. when hovered over it in a UI.
     * </pre>
     *
     * <code>string description = 3;</code>
     *
     * @return The description.
     */
    public java.lang.String getDescription() {
      java.lang.Object ref = description_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        description_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Description of the expression. This is a longer text which
     * describes the expression, e.g. when hovered over it in a UI.
     * </pre>
     *
     * <code>string description = 3;</code>
     *
     * @return The bytes for description.
     */
    public com.google.protobuf.ByteString getDescriptionBytes() {
      java.lang.Object ref = description_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        description_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Description of the expression. This is a longer text which
     * describes the expression, e.g. when hovered over it in a UI.
     * </pre>
     *
     * <code>string description = 3;</code>
     *
     * @param value The description to set.
     * @return This builder for chaining.
     */
    public Builder setDescription(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      description_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Description of the expression. This is a longer text which
     * describes the expression, e.g. when hovered over it in a UI.
     * </pre>
     *
     * <code>string description = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDescription() {
      description_ = getDefaultInstance().getDescription();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Description of the expression. This is a longer text which
     * describes the expression, e.g. when hovered over it in a UI.
     * </pre>
     *
     * <code>string description = 3;</code>
     *
     * @param value The bytes for description to set.
     * @return This builder for chaining.
     */
    public Builder setDescriptionBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      description_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    private java.lang.Object location_ = "";
    /**
     *
     *
     * <pre>
     * Optional. String indicating the location of the expression for error
     * reporting, e.g. a file name and a position in the file.
     * </pre>
     *
     * <code>string location = 4;</code>
     *
     * @return The location.
     */
    public java.lang.String getLocation() {
      java.lang.Object ref = location_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        location_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. String indicating the location of the expression for error
     * reporting, e.g. a file name and a position in the file.
     * </pre>
     *
     * <code>string location = 4;</code>
     *
     * @return The bytes for location.
     */
    public com.google.protobuf.ByteString getLocationBytes() {
      java.lang.Object ref = location_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        location_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. String indicating the location of the expression for error
     * reporting, e.g. a file name and a position in the file.
     * </pre>
     *
     * <code>string location = 4;</code>
     *
     * @param value The location to set.
     * @return This builder for chaining.
     */
    public Builder setLocation(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      location_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. String indicating the location of the expression for error
     * reporting, e.g. a file name and a position in the file.
     * </pre>
     *
     * <code>string location = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearLocation() {
      location_ = getDefaultInstance().getLocation();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. String indicating the location of the expression for error
     * reporting, e.g. a file name and a position in the file.
     * </pre>
     *
     * <code>string location = 4;</code>
     *
     * @param value The bytes for location to set.
     * @return This builder for chaining.
     */
    public Builder setLocationBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      location_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.type.Expr)
  }
  // @@protoc_insertion_point(class_scope:google.type.Expr)
  // Singleton default (all-empty) instance shared by getDefaultInstance()/newBuilder().
  private static final com.google.type.Expr DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.type.Expr();
  }

  public static com.google.type.Expr getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to Builder.mergeFrom and attaches the partially-built
  // message to any parse failure so callers can inspect what was read.
  private static final com.google.protobuf.Parser<Expr> PARSER =
      new com.google.protobuf.AbstractParser<Expr>() {
        @java.lang.Override
        public Expr parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<Expr> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<Expr> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.type.Expr getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hudi | 38,272 | hudi-client/hudi-client-common/src/main/java/org/apache/hudi/config/HoodieIndexConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.config;
import org.apache.hudi.common.config.ConfigClassProperty;
import org.apache.hudi.common.config.ConfigGroups;
import org.apache.hudi.common.config.ConfigProperty;
import org.apache.hudi.common.config.HoodieConfig;
import org.apache.hudi.common.config.HoodieStorageConfig;
import org.apache.hudi.common.engine.EngineType;
import org.apache.hudi.common.util.StringUtils;
import org.apache.hudi.exception.HoodieIndexException;
import org.apache.hudi.exception.HoodieNotSupportedException;
import org.apache.hudi.index.HoodieIndex;
import org.apache.hudi.index.bucket.partition.PartitionBucketIndexRule;
import org.apache.hudi.keygen.constant.KeyGeneratorOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.concurrent.Immutable;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Arrays;
import java.util.Properties;
import java.util.stream.Collectors;
import static org.apache.hudi.common.config.HoodieStorageConfig.BLOOM_FILTER_DYNAMIC_MAX_ENTRIES;
import static org.apache.hudi.common.config.HoodieStorageConfig.BLOOM_FILTER_FPP_VALUE;
import static org.apache.hudi.common.config.HoodieStorageConfig.BLOOM_FILTER_NUM_ENTRIES_VALUE;
import static org.apache.hudi.common.config.HoodieStorageConfig.BLOOM_FILTER_TYPE;
import static org.apache.hudi.index.HoodieIndex.IndexType.BLOOM;
import static org.apache.hudi.index.HoodieIndex.IndexType.BUCKET;
import static org.apache.hudi.index.HoodieIndex.IndexType.FLINK_STATE;
import static org.apache.hudi.index.HoodieIndex.IndexType.GLOBAL_BLOOM;
import static org.apache.hudi.index.HoodieIndex.IndexType.GLOBAL_SIMPLE;
import static org.apache.hudi.index.HoodieIndex.IndexType.INMEMORY;
import static org.apache.hudi.index.HoodieIndex.IndexType.RECORD_INDEX;
import static org.apache.hudi.index.HoodieIndex.IndexType.PARTITIONED_RECORD_INDEX;
import static org.apache.hudi.index.HoodieIndex.IndexType.SIMPLE;
/**
* Indexing related config.
*/
@Immutable
@ConfigClassProperty(name = "Common Index Configs",
groupName = ConfigGroups.Names.WRITE_CLIENT,
subGroupName = ConfigGroups.SubGroupNames.INDEX,
areCommonConfigs = true,
description = "")
public class HoodieIndexConfig extends HoodieConfig {
private static final Logger LOG = LoggerFactory.getLogger(HoodieIndexConfig.class);
  // Primary index type selector. Has no static default because the effective
  // default depends on the engine (see the builder's getDefaultIndexType).
  public static final ConfigProperty<String> INDEX_TYPE = ConfigProperty
      .key("hoodie.index.type")
      // Builder#getDefaultIndexType has already set it according to engine type
      .noDefaultValue()
      .withValidValues(INMEMORY.name(), BLOOM.name(), GLOBAL_BLOOM.name(), SIMPLE.name(), GLOBAL_SIMPLE.name(),
          BUCKET.name(), FLINK_STATE.name(), RECORD_INDEX.name(), PARTITIONED_RECORD_INDEX.name())
      .withDocumentation(HoodieIndex.IndexType.class);
  // Escape hatch for a user-supplied HoodieIndex implementation; when set, it
  // overrides hoodie.index.type. Empty string means "not configured".
  public static final ConfigProperty<String> INDEX_CLASS_NAME = ConfigProperty
      .key("hoodie.index.class")
      .defaultValue("")
      .markAdvanced()
      .withDocumentation("Full path of user-defined index class and must be a subclass of HoodieIndex class. "
          + "It will take precedence over the hoodie.index.type configuration if specified");
// ***** Bloom Index configs *****
  // Shuffle parallelism for the bloom index lookup; "0" means auto-compute
  // from the input workload.
  public static final ConfigProperty<String> BLOOM_INDEX_PARALLELISM = ConfigProperty
      .key("hoodie.bloom.index.parallelism")
      .defaultValue("0")
      .markAdvanced()
      .withDocumentation("Only applies if index type is BLOOM. "
          + "This is the amount of parallelism for index lookup, which involves a shuffle. "
          + "By default, this is auto computed based on input workload characteristics. "
          + "If the parallelism is explicitly configured by the user, the user-configured "
          + "value is used in defining the actual parallelism. If the indexing stage is slow "
          + "due to the limited parallelism, you can increase this to tune the performance.");
public static final ConfigProperty<String> BLOOM_INDEX_PRUNE_BY_RANGES = ConfigProperty
.key("hoodie.bloom.index.prune.by.ranges")
.defaultValue("true")
.markAdvanced()
.withDocumentation("Only applies if index type is BLOOM. "
+ "When true, range information from files to leveraged speed up index lookups. Particularly helpful, "
+ "if the key has a monotonously increasing prefix, such as timestamp. "
+ "If the record key is completely random, it is better to turn this off, since range pruning will only "
+ " add extra overhead to the index lookup.");
public static final ConfigProperty<String> BLOOM_INDEX_USE_CACHING = ConfigProperty
.key("hoodie.bloom.index.use.caching")
.defaultValue("true")
.markAdvanced()
.withDocumentation("Only applies if index type is BLOOM."
+ "When true, the input RDD will cached to speed up index lookup by reducing IO "
+ "for computing parallelism or affected partitions");
public static final ConfigProperty<Boolean> BLOOM_INDEX_USE_METADATA = ConfigProperty
.key("hoodie.bloom.index.use.metadata")
.defaultValue(false)
.markAdvanced()
.sinceVersion("0.11.0")
.withDocumentation("Only applies if index type is BLOOM."
+ "When true, the index lookup uses bloom filters and column stats from metadata "
+ "table when available to speed up the process.");
  // Enables interval-tree-based file pruning for key-range comparisons,
  // replacing the brute-force range scan.
  public static final ConfigProperty<String> BLOOM_INDEX_TREE_BASED_FILTER = ConfigProperty
      .key("hoodie.bloom.index.use.treebased.filter")
      .defaultValue("true")
      .markAdvanced()
      .withDocumentation("Only applies if index type is BLOOM. "
          + "When true, interval tree based file pruning optimization is enabled. "
          + "This mode speeds-up file-pruning based on key ranges when compared with the brute-force mode");
// TODO: On by default. Once stable, we will remove the other mode.
public static final ConfigProperty<String> BLOOM_INDEX_BUCKETIZED_CHECKING = ConfigProperty
.key("hoodie.bloom.index.bucketized.checking")
.defaultValue("true")
.markAdvanced()
.withDocumentation("Only applies if index type is BLOOM. "
+ "When true, bucketized bloom filtering is enabled. "
+ "This reduces skew seen in sort based bloom index lookup");
public static final ConfigProperty<String> BLOOM_INDEX_BUCKETIZED_CHECKING_ENABLE_DYNAMIC_PARALLELISM = ConfigProperty
.key("hoodie.bloom.index.bucketized.checking.enable.dynamic.parallelism")
.defaultValue("false")
.markAdvanced()
.sinceVersion("1.1.0")
.withDocumentation("Only applies if index type is BLOOM and the bucketized bloom filtering "
+ "is enabled. When true, the index parallelism is determined by the number of file "
+ "groups to look up and the number of keys per bucket to split comparisons within a "
+ "file group; otherwise, the index parallelism is limited by the input parallelism. "
+ "PLEASE NOTE that if the bloom index parallelism (" + BLOOM_INDEX_PARALLELISM.key()
+ ") is configured, the bloom index parallelism takes effect instead of the input "
+ "parallelism and always limits the number of buckets calculated based on the number "
+ "of keys per bucket in the bucketized bloom filtering.");
public static final ConfigProperty<String> BLOOM_INDEX_FILE_GROUP_ID_KEY_SORTING = ConfigProperty
.key("hoodie.bloom.index.fileid.key.sorting.enable")
.defaultValue("false")
.markAdvanced()
.sinceVersion("1.1.0")
.withDocumentation("Only applies if index type is BLOOM. "
+ "When true, the global sorting based on the fileId and key is enabled during key lookup. "
+ "This reduces skew in the key lookup in the bloom index.");
/**
 * Whether to cache the incoming writes during simple index lookup to avoid recomputation.
 */
public static final ConfigProperty<String> SIMPLE_INDEX_USE_CACHING = ConfigProperty
    .key("hoodie.simple.index.use.caching")
    .defaultValue("true")
    .markAdvanced()
    .withDocumentation("Only applies if index type is SIMPLE. "
        + "When true, the incoming writes will be cached to speed up index lookup by reducing IO "
        + "for computing parallelism or affected partitions");
public static final ConfigProperty<String> SIMPLE_INDEX_PARALLELISM = ConfigProperty
.key("hoodie.simple.index.parallelism")
.defaultValue("0")
.markAdvanced()
.withDocumentation("Only applies if index type is SIMPLE. "
+ "This limits the parallelism of fetching records from the base files of affected "
+ "partitions. By default, this is auto computed based on input workload characteristics. "
+ "If the parallelism is explicitly configured by the user, the user-configured "
+ "value is used in defining the actual parallelism. If the indexing stage is slow "
+ "due to the limited parallelism, you can increase this to tune the performance.");
public static final ConfigProperty<String> GLOBAL_SIMPLE_INDEX_PARALLELISM = ConfigProperty
.key("hoodie.global.simple.index.parallelism")
.defaultValue("0")
.markAdvanced()
.withDocumentation("Only applies if index type is GLOBAL_SIMPLE. "
+ "This limits the parallelism of fetching records from the base files of all table "
+ "partitions. The index picks the configured parallelism if the number of base "
+ "files is larger than this configured value; otherwise, the number of base files "
+ "is used as the parallelism. If the indexing stage is slow due to the limited "
+ "parallelism, you can increase this to tune the performance.");
// 1B bloom filter checks happen in 250 seconds. 500ms to read a bloom filter.
// 10M checks in 2500ms, thus amortizing the cost of reading bloom filter across partitions.
public static final ConfigProperty<String> BLOOM_INDEX_KEYS_PER_BUCKET = ConfigProperty
.key("hoodie.bloom.index.keys.per.bucket")
.defaultValue("10000000")
.markAdvanced()
.withDocumentation("Only applies if bloomIndexBucketizedChecking is enabled and index type is bloom. "
+ "This configuration controls the “bucket” size which tracks the number of record-key checks made against "
+ "a single file and is the unit of work allocated to each partition performing bloom filter lookup. "
+ "A higher value would amortize the fixed cost of reading a bloom filter to memory.");
public static final ConfigProperty<String> BLOOM_INDEX_INPUT_STORAGE_LEVEL_VALUE = ConfigProperty
.key("hoodie.bloom.index.input.storage.level")
.defaultValue("MEMORY_AND_DISK_SER")
.markAdvanced()
.withDocumentation("Only applies when #bloomIndexUseCaching is set. Determine what level of persistence is used to cache input RDDs. "
+ "Refer to org.apache.spark.storage.StorageLevel for different values");
public static final ConfigProperty<String> SIMPLE_INDEX_INPUT_STORAGE_LEVEL_VALUE = ConfigProperty
.key("hoodie.simple.index.input.storage.level")
.defaultValue("MEMORY_AND_DISK_SER")
.markAdvanced()
.withDocumentation("Only applies when #simpleIndexUseCaching is set. Determine what level of persistence is used to cache input RDDs. "
+ "Refer to org.apache.spark.storage.StorageLevel for different values");
/**
* Only applies if index type is GLOBAL_BLOOM.
* <p>
* When set to true, an update to a record with a different partition from its existing one
* will insert the record to the new partition and delete it from the old partition.
* <p>
* When set to false, a record will be updated to the old partition.
*/
public static final ConfigProperty<String> BLOOM_INDEX_UPDATE_PARTITION_PATH_ENABLE = ConfigProperty
.key("hoodie.bloom.index.update.partition.path")
.defaultValue("true")
.markAdvanced()
.withDocumentation("Only applies if index type is GLOBAL_BLOOM. "
+ "When set to true, an update including the partition path of a record that already exists will result in "
+ "inserting the incoming record into the new partition and deleting the original record in the old partition. "
+ "When set to false, the original record will only be updated in the old partition");
/**
 * Simple-index counterpart of {@link #BLOOM_INDEX_UPDATE_PARTITION_PATH_ENABLE}:
 * controls whether a partition-path change on an existing record moves the record
 * to the new partition (true) or updates it in place in the old partition (false).
 */
public static final ConfigProperty<String> SIMPLE_INDEX_UPDATE_PARTITION_PATH_ENABLE = ConfigProperty
    .key("hoodie.simple.index.update.partition.path")
    .defaultValue("true")
    .markAdvanced()
    // Use .key() in the doc text — concatenating the ConfigProperty itself would embed
    // its toString() output rather than the config key (cf. usage at BLOOM_INDEX_BUCKETIZED_CHECKING_ENABLE_DYNAMIC_PARALLELISM).
    .withDocumentation("Similar to " + BLOOM_INDEX_UPDATE_PARTITION_PATH_ENABLE.key() + ", but for simple index.");
/**
 * Record-index counterpart of {@link #BLOOM_INDEX_UPDATE_PARTITION_PATH_ENABLE}:
 * controls whether a partition-path change on an existing record moves the record
 * to the new partition (true) or updates it in place in the old partition (false).
 */
public static final ConfigProperty<String> RECORD_INDEX_UPDATE_PARTITION_PATH_ENABLE = ConfigProperty
    .key("hoodie.record.index.update.partition.path")
    .defaultValue("false")
    .markAdvanced()
    .sinceVersion("0.14.0")
    // Use .key() so the rendered documentation shows the config key, not ConfigProperty.toString().
    .withDocumentation("Similar to " + BLOOM_INDEX_UPDATE_PARTITION_PATH_ENABLE.key() + ", but for record index.");
public static final ConfigProperty<String> GLOBAL_INDEX_RECONCILE_PARALLELISM = ConfigProperty
.key("hoodie.global.index.reconcile.parallelism")
.defaultValue("60")
.markAdvanced()
.withDocumentation("Only applies if index type is GLOBAL_BLOOM or GLOBAL_SIMPLE. "
+ "This controls the parallelism for deduplication during indexing where more than 1 record could be tagged due to partition update.");
/**
* ***** Bucket Index Configs *****
* Bucket Index is targeted to locate the record fast by hash in big data scenarios.
* A bucket size is recommended less than 3GB to avoid being too small.
* For more details and progress, see [HUDI-3039].
*/
/**
* Bucket Index Engine Type: implementation of bucket index
*
* SIMPLE:
* 0. Check `HoodieSimpleBucketLayout` for its supported operations.
* 1. Bucket num is fixed and requires rewriting the partition if we want to change it.
*
* CONSISTENT_HASHING:
* 0. Check `HoodieConsistentBucketLayout` for its supported operations.
* 1. Bucket num will auto-adjust by running clustering (still in progress)
*/
public static final ConfigProperty<String> BUCKET_INDEX_ENGINE_TYPE = ConfigProperty
.key("hoodie.index.bucket.engine")
.defaultValue(HoodieIndex.BucketIndexEngineType.SIMPLE.name())
.markAdvanced()
.sinceVersion("0.11.0")
.withDocumentation(HoodieIndex.BucketIndexEngineType.class);
/**
* Bucket num equals file groups num in each partition.
* Bucket num can be set according to partition size and file group size.
*
* In dynamic bucket index cases (e.g., using CONSISTENT_HASHING), this config of number of bucket serves as a initial bucket size
*/
public static final ConfigProperty<Integer> BUCKET_INDEX_NUM_BUCKETS = ConfigProperty
.key("hoodie.bucket.index.num.buckets")
.defaultValue(256)
.markAdvanced()
.withDocumentation("Only applies if index type is BUCKET. Determine the number of buckets in the hudi table, "
+ "and each partition is divided to N buckets.");
public static final ConfigProperty<Boolean> BUCKET_PARTITIONER = ConfigProperty
.key("hoodie.bucket.index.remote.partitioner.enable")
.defaultValue(false)
.withDocumentation("Use Remote Partitioner using centralized allocation of partition "
+ "IDs to do repartition based on bucket aiming to resolve data skew. Default local hash partitioner");
public static final ConfigProperty<String> BUCKET_INDEX_PARTITION_RULE_TYPE = ConfigProperty
.key("hoodie.bucket.index.partition.rule.type")
.defaultValue(PartitionBucketIndexRule.REGEX.name)
.markAdvanced()
.withDocumentation("Rule parser for expressions when using partition level bucket index, default regex.");
public static final ConfigProperty<String> BUCKET_INDEX_PARTITION_EXPRESSIONS = ConfigProperty
.key("hoodie.bucket.index.partition.expressions")
.noDefaultValue()
.markAdvanced()
.withDocumentation("Users can use this parameter to specify expression and the corresponding bucket "
+ "numbers (separated by commas).Multiple rules are separated by semicolons like "
+ "hoodie.bucket.index.partition.expressions=expression1,bucket-number1;expression2,bucket-number2");
public static final ConfigProperty<String> BUCKET_INDEX_MAX_NUM_BUCKETS = ConfigProperty
.key("hoodie.bucket.index.max.num.buckets")
.noDefaultValue()
.markAdvanced()
.sinceVersion("0.13.0")
.withDocumentation("Only applies if bucket index engine is consistent hashing. Determine the upper bound of "
+ "the number of buckets in the hudi table. Bucket resizing cannot be done higher than this max limit.");
public static final ConfigProperty<String> BUCKET_INDEX_MIN_NUM_BUCKETS = ConfigProperty
.key("hoodie.bucket.index.min.num.buckets")
.noDefaultValue()
.markAdvanced()
.sinceVersion("0.13.0")
.withDocumentation("Only applies if bucket index engine is consistent hashing. Determine the lower bound of "
+ "the number of buckets in the hudi table. Bucket resizing cannot be done lower than this min limit.");
public static final ConfigProperty<String> BUCKET_INDEX_HASH_FIELD = ConfigProperty
.key("hoodie.bucket.index.hash.field")
.noDefaultValue()
.markAdvanced()
.withDocumentation("Index key. It is used to index the record and find its file group. "
+ "If not set, use record key field as default");
/**
 * Size-ratio threshold that triggers a bucket split under the consistent hashing bucket index.
 */
public static final ConfigProperty<Double> BUCKET_SPLIT_THRESHOLD = ConfigProperty
    .key("hoodie.bucket.index.split.threshold")
    .defaultValue(2.0)
    .markAdvanced()
    .sinceVersion("0.13.0")
    .withDocumentation("Control if the bucket should be split when using consistent hashing bucket index. "
        + "Specifically, if a file slice size reaches `hoodie.xxxx.max.file.size` * threshold, then split will be carried out.");
/**
 * Size-ratio threshold below which a bucket becomes a merge candidate under the
 * consistent hashing bucket index.
 */
public static final ConfigProperty<Double> BUCKET_MERGE_THRESHOLD = ConfigProperty
    .key("hoodie.bucket.index.merge.threshold")
    .defaultValue(0.2)
    .markAdvanced()
    .sinceVersion("0.13.0")
    .withDocumentation("Control if buckets should be merged when using consistent hashing bucket index. "
        + "Specifically, if a file slice size is smaller than `hoodie.xxxx.max.file.size` * threshold, then it will be considered "
        + "as a merge candidate.");
/**
 * Whether to cache the input RDD during record index lookup to avoid recomputation.
 */
public static final ConfigProperty<String> RECORD_INDEX_USE_CACHING = ConfigProperty
    .key("hoodie.record.index.use.caching")
    .defaultValue("true")
    .markAdvanced()
    .sinceVersion("0.14.0")
    .withDocumentation("Only applies if index type is RECORD_INDEX. "
        + "When true, the input RDD will be cached to speed up index lookup by reducing IO "
        + "for computing parallelism or affected partitions");
public static final ConfigProperty<String> RECORD_INDEX_INPUT_STORAGE_LEVEL_VALUE = ConfigProperty
.key("hoodie.record.index.input.storage.level")
.defaultValue("MEMORY_AND_DISK_SER")
.markAdvanced()
.sinceVersion("0.14.0")
.withDocumentation("Only applies when #recordIndexUseCaching is set. Determine what level of persistence is used to cache input RDDs. "
+ "Refer to org.apache.spark.storage.StorageLevel for different values");
public static final ConfigProperty<Boolean> BUCKET_QUERY_INDEX = ConfigProperty
.key("hoodie.bucket.index.query.pruning")
.defaultValue(true)
.withDocumentation("Control if table with bucket index use bucket query or not");
/** @deprecated Use {@link #INDEX_TYPE} and its methods instead */
@Deprecated
public static final String INDEX_TYPE_PROP = INDEX_TYPE.key();
/**
* @deprecated Use {@link #INDEX_CLASS_NAME} and its methods instead
*/
@Deprecated
public static final String INDEX_CLASS_PROP = INDEX_CLASS_NAME.key();
/**
* @deprecated Use {@link #INDEX_CLASS_NAME} and its methods instead
*/
@Deprecated
public static final String DEFAULT_INDEX_CLASS = INDEX_CLASS_NAME.defaultValue();
/**
* @deprecated Use {@link HoodieStorageConfig#BLOOM_FILTER_NUM_ENTRIES_VALUE} and its methods instead
*/
@Deprecated
public static final String BLOOM_FILTER_NUM_ENTRIES = BLOOM_FILTER_NUM_ENTRIES_VALUE.key();
/**
* @deprecated Use {@link HoodieStorageConfig#BLOOM_FILTER_NUM_ENTRIES_VALUE} and its methods instead
*/
@Deprecated
public static final String DEFAULT_BLOOM_FILTER_NUM_ENTRIES = BLOOM_FILTER_NUM_ENTRIES_VALUE.defaultValue();
/**
* @deprecated Use {@link HoodieStorageConfig#BLOOM_FILTER_FPP_VALUE} and its methods instead
*/
@Deprecated
public static final String BLOOM_FILTER_FPP = BLOOM_FILTER_FPP_VALUE.key();
/**
* @deprecated Use {@link HoodieStorageConfig#BLOOM_FILTER_FPP_VALUE} and its methods instead
*/
@Deprecated
public static final String DEFAULT_BLOOM_FILTER_FPP = BLOOM_FILTER_FPP_VALUE.defaultValue();
/**
* @deprecated Use {@link #BLOOM_INDEX_PARALLELISM} and its methods instead
*/
@Deprecated
public static final String BLOOM_INDEX_PARALLELISM_PROP = BLOOM_INDEX_PARALLELISM.key();
/**
* @deprecated Use {@link #BLOOM_INDEX_PARALLELISM} and its methods instead
*/
@Deprecated
public static final String DEFAULT_BLOOM_INDEX_PARALLELISM = BLOOM_INDEX_PARALLELISM.defaultValue();
/**
* @deprecated Use {@link #BLOOM_INDEX_PRUNE_BY_RANGES} and its methods instead
*/
@Deprecated
public static final String BLOOM_INDEX_PRUNE_BY_RANGES_PROP = BLOOM_INDEX_PRUNE_BY_RANGES.key();
/** @deprecated Use {@link #BLOOM_INDEX_PRUNE_BY_RANGES} and its methods instead */
@Deprecated
public static final String DEFAULT_BLOOM_INDEX_PRUNE_BY_RANGES = BLOOM_INDEX_PRUNE_BY_RANGES.defaultValue();
/** @deprecated Use {@link #BLOOM_INDEX_USE_CACHING} and its methods instead */
@Deprecated
public static final String BLOOM_INDEX_USE_CACHING_PROP = BLOOM_INDEX_USE_CACHING.key();
/** @deprecated Use {@link #BLOOM_INDEX_USE_CACHING} and its methods instead */
@Deprecated
public static final String DEFAULT_BLOOM_INDEX_USE_CACHING = BLOOM_INDEX_USE_CACHING.defaultValue();
/** @deprecated Use {@link #BLOOM_INDEX_TREE_BASED_FILTER} and its methods instead */
@Deprecated
public static final String BLOOM_INDEX_TREE_BASED_FILTER_PROP = BLOOM_INDEX_TREE_BASED_FILTER.key();
/** @deprecated Use {@link #BLOOM_INDEX_TREE_BASED_FILTER} and its methods instead */
@Deprecated
public static final String DEFAULT_BLOOM_INDEX_TREE_BASED_FILTER = BLOOM_INDEX_TREE_BASED_FILTER.defaultValue();
/**
* @deprecated Use {@link #BLOOM_INDEX_BUCKETIZED_CHECKING} and its methods instead
*/
@Deprecated
public static final String BLOOM_INDEX_BUCKETIZED_CHECKING_PROP = BLOOM_INDEX_BUCKETIZED_CHECKING.key();
/**
* @deprecated Use {@link #BLOOM_INDEX_BUCKETIZED_CHECKING} and its methods instead
*/
@Deprecated
public static final String DEFAULT_BLOOM_INDEX_BUCKETIZED_CHECKING = BLOOM_INDEX_BUCKETIZED_CHECKING.defaultValue();
/**
* @deprecated Use {@link HoodieStorageConfig#BLOOM_FILTER_TYPE} and its methods instead
*/
@Deprecated
public static final String BLOOM_INDEX_FILTER_TYPE = BLOOM_FILTER_TYPE.key();
/**
* @deprecated Use {@link HoodieStorageConfig#BLOOM_FILTER_TYPE} and its methods instead
*/
@Deprecated
public static final String DEFAULT_BLOOM_INDEX_FILTER_TYPE = BLOOM_FILTER_TYPE.defaultValue();
/**
* @deprecated Use {@link HoodieStorageConfig#BLOOM_FILTER_DYNAMIC_MAX_ENTRIES} and its methods instead
*/
@Deprecated
public static final String HOODIE_BLOOM_INDEX_FILTER_DYNAMIC_MAX_ENTRIES = BLOOM_FILTER_DYNAMIC_MAX_ENTRIES.key();
/**
* @deprecated Use {@link HoodieStorageConfig#BLOOM_FILTER_DYNAMIC_MAX_ENTRIES} and its methods instead
*/
@Deprecated
public static final String DEFAULT_HOODIE_BLOOM_INDEX_FILTER_DYNAMIC_MAX_ENTRIES = BLOOM_FILTER_DYNAMIC_MAX_ENTRIES.defaultValue();
/**
* @deprecated Use {@link #SIMPLE_INDEX_USE_CACHING} and its methods instead
*/
@Deprecated
public static final String SIMPLE_INDEX_USE_CACHING_PROP = SIMPLE_INDEX_USE_CACHING.key();
/**
* @deprecated Use {@link #SIMPLE_INDEX_USE_CACHING} and its methods instead
*/
@Deprecated
public static final String DEFAULT_SIMPLE_INDEX_USE_CACHING = SIMPLE_INDEX_USE_CACHING.defaultValue();
/** @deprecated Use {@link #SIMPLE_INDEX_PARALLELISM} and its methods instead */
@Deprecated
public static final String SIMPLE_INDEX_PARALLELISM_PROP = SIMPLE_INDEX_PARALLELISM.key();
/** @deprecated Use {@link #SIMPLE_INDEX_PARALLELISM} and its methods instead */
@Deprecated
public static final String DEFAULT_SIMPLE_INDEX_PARALLELISM = SIMPLE_INDEX_PARALLELISM.defaultValue();
/** @deprecated Use {@link #GLOBAL_SIMPLE_INDEX_PARALLELISM} and its methods instead */
@Deprecated
public static final String GLOBAL_SIMPLE_INDEX_PARALLELISM_PROP = GLOBAL_SIMPLE_INDEX_PARALLELISM.key();
/**
* @deprecated Use {@link #GLOBAL_SIMPLE_INDEX_PARALLELISM} and its methods instead
*/
@Deprecated
public static final String DEFAULT_GLOBAL_SIMPLE_INDEX_PARALLELISM = GLOBAL_SIMPLE_INDEX_PARALLELISM.defaultValue();
/**
* @deprecated Use {@link #BLOOM_INDEX_KEYS_PER_BUCKET} and its methods instead
*/
@Deprecated
public static final String BLOOM_INDEX_KEYS_PER_BUCKET_PROP = BLOOM_INDEX_KEYS_PER_BUCKET.key();
/**
* @deprecated Use {@link #BLOOM_INDEX_KEYS_PER_BUCKET} and its methods instead
*/
@Deprecated
public static final String DEFAULT_BLOOM_INDEX_KEYS_PER_BUCKET = BLOOM_INDEX_KEYS_PER_BUCKET.defaultValue();
/**
* @deprecated Use {@link #BLOOM_INDEX_INPUT_STORAGE_LEVEL_VALUE} and its methods instead
*/
@Deprecated
public static final String BLOOM_INDEX_INPUT_STORAGE_LEVEL = BLOOM_INDEX_INPUT_STORAGE_LEVEL_VALUE.key();
/**
* @deprecated Use {@link #BLOOM_INDEX_INPUT_STORAGE_LEVEL_VALUE} and its methods instead
*/
@Deprecated
public static final String DEFAULT_BLOOM_INDEX_INPUT_STORAGE_LEVEL = BLOOM_INDEX_INPUT_STORAGE_LEVEL_VALUE.defaultValue();
/**
* @deprecated Use {@link #SIMPLE_INDEX_INPUT_STORAGE_LEVEL_VALUE} and its methods instead
*/
@Deprecated
public static final String SIMPLE_INDEX_INPUT_STORAGE_LEVEL = SIMPLE_INDEX_INPUT_STORAGE_LEVEL_VALUE.key();
/**
* @deprecated Use {@link #SIMPLE_INDEX_INPUT_STORAGE_LEVEL_VALUE} and its methods instead
*/
@Deprecated
public static final String DEFAULT_SIMPLE_INDEX_INPUT_STORAGE_LEVEL = SIMPLE_INDEX_INPUT_STORAGE_LEVEL_VALUE.defaultValue();
/**
* @deprecated Use {@link #BLOOM_INDEX_UPDATE_PARTITION_PATH_ENABLE} and its methods instead
*/
@Deprecated
public static final String BLOOM_INDEX_UPDATE_PARTITION_PATH = BLOOM_INDEX_UPDATE_PARTITION_PATH_ENABLE.key();
/**
* @deprecated Use {@link #BLOOM_INDEX_UPDATE_PARTITION_PATH_ENABLE} and its methods instead
*/
@Deprecated
public static final String DEFAULT_BLOOM_INDEX_UPDATE_PARTITION_PATH = BLOOM_INDEX_UPDATE_PARTITION_PATH_ENABLE.defaultValue();
/**
* @deprecated Use {@link #SIMPLE_INDEX_UPDATE_PARTITION_PATH_ENABLE} and its methods instead
*/
@Deprecated
public static final String SIMPLE_INDEX_UPDATE_PARTITION_PATH = SIMPLE_INDEX_UPDATE_PARTITION_PATH_ENABLE.key();
/**
* @deprecated Use {@link #SIMPLE_INDEX_UPDATE_PARTITION_PATH_ENABLE} and its methods instead
*/
@Deprecated
public static final String DEFAULT_SIMPLE_INDEX_UPDATE_PARTITION_PATH = SIMPLE_INDEX_UPDATE_PARTITION_PATH_ENABLE.defaultValue();
// Engine this config instance was built for; used by the Builder to pick
// engine-specific defaults (e.g. the default index type).
private final EngineType engineType;
/**
 * No-arg constructor: uses the Spark engine by default.
 */
private HoodieIndexConfig() {
this(EngineType.SPARK);
}
// Canonical constructor: records the target engine type. Instances are only
// created through the Builder, hence both constructors are private.
private HoodieIndexConfig(EngineType engineType) {
super();
this.engineType = engineType;
}
/**
 * Entry point for fluently constructing a {@link HoodieIndexConfig}.
 */
public static HoodieIndexConfig.Builder newBuilder() {
return new Builder();
}
/**
 * Fluent builder for {@link HoodieIndexConfig}. Accumulates user-supplied index
 * settings, fills in engine-specific defaults in {@link #build()}, and validates
 * bucket-index configuration before returning the config instance.
 */
public static class Builder {
  // Engine the config is built for; drives the default index type chosen in build().
  private EngineType engineType = EngineType.SPARK;
  private final HoodieIndexConfig hoodieIndexConfig = new HoodieIndexConfig();

  /**
   * Loads properties from the given file.
   *
   * @throws IOException if the file cannot be read
   */
  public Builder fromFile(File propertiesFile) throws IOException {
    try (FileReader reader = new FileReader(propertiesFile)) {
      this.hoodieIndexConfig.getProps().load(reader);
      return this;
    }
  }

  /** Copies all entries from {@code props} into this config. */
  public Builder fromProperties(Properties props) {
    this.hoodieIndexConfig.getProps().putAll(props);
    return this;
  }

  public Builder withIndexType(HoodieIndex.IndexType indexType) {
    hoodieIndexConfig.setValue(INDEX_TYPE, indexType.name());
    return this;
  }

  public Builder withBucketIndexEngineType(HoodieIndex.BucketIndexEngineType bucketType) {
    hoodieIndexConfig.setValue(BUCKET_INDEX_ENGINE_TYPE, bucketType.name());
    return this;
  }

  public Builder withIndexClass(String indexClass) {
    hoodieIndexConfig.setValue(INDEX_CLASS_NAME, indexClass);
    return this;
  }

  public Builder bloomFilterNumEntries(int numEntries) {
    hoodieIndexConfig.setValue(BLOOM_FILTER_NUM_ENTRIES_VALUE, String.valueOf(numEntries));
    return this;
  }

  public Builder bloomFilterFPP(double fpp) {
    hoodieIndexConfig.setValue(BLOOM_FILTER_FPP_VALUE, String.valueOf(fpp));
    return this;
  }

  public Builder bloomIndexParallelism(int parallelism) {
    hoodieIndexConfig.setValue(BLOOM_INDEX_PARALLELISM, String.valueOf(parallelism));
    return this;
  }

  public Builder bloomIndexPruneByRanges(boolean pruneRanges) {
    hoodieIndexConfig.setValue(BLOOM_INDEX_PRUNE_BY_RANGES, String.valueOf(pruneRanges));
    return this;
  }

  public Builder bloomIndexUseCaching(boolean useCaching) {
    hoodieIndexConfig.setValue(BLOOM_INDEX_USE_CACHING, String.valueOf(useCaching));
    return this;
  }

  public Builder bloomIndexUseMetadata(boolean useMetadata) {
    hoodieIndexConfig.setValue(BLOOM_INDEX_USE_METADATA, String.valueOf(useMetadata));
    return this;
  }

  public Builder bloomIndexTreebasedFilter(boolean useTreeFilter) {
    hoodieIndexConfig.setValue(BLOOM_INDEX_TREE_BASED_FILTER, String.valueOf(useTreeFilter));
    return this;
  }

  public Builder bloomIndexBucketizedChecking(boolean bucketizedChecking) {
    hoodieIndexConfig.setValue(BLOOM_INDEX_BUCKETIZED_CHECKING, String.valueOf(bucketizedChecking));
    return this;
  }

  public Builder enableBloomIndexFileGroupIdKeySorting(boolean fileGroupIdKeySorting) {
    hoodieIndexConfig.setValue(BLOOM_INDEX_FILE_GROUP_ID_KEY_SORTING, String.valueOf(fileGroupIdKeySorting));
    return this;
  }

  public Builder bloomIndexKeysPerBucket(int keysPerBucket) {
    hoodieIndexConfig.setValue(BLOOM_INDEX_KEYS_PER_BUCKET, String.valueOf(keysPerBucket));
    return this;
  }

  /** @param level a org.apache.spark.storage.StorageLevel name, e.g. "MEMORY_AND_DISK_SER" */
  public Builder withBloomIndexInputStorageLevel(String level) {
    hoodieIndexConfig.setValue(BLOOM_INDEX_INPUT_STORAGE_LEVEL_VALUE, level);
    return this;
  }

  public Builder withGlobalBloomIndexUpdatePartitionPath(boolean updatePartitionPath) {
    hoodieIndexConfig.setValue(BLOOM_INDEX_UPDATE_PARTITION_PATH_ENABLE, String.valueOf(updatePartitionPath));
    return this;
  }

  public Builder withSimpleIndexParallelism(int parallelism) {
    hoodieIndexConfig.setValue(SIMPLE_INDEX_PARALLELISM, String.valueOf(parallelism));
    return this;
  }

  public Builder simpleIndexUseCaching(boolean useCaching) {
    hoodieIndexConfig.setValue(SIMPLE_INDEX_USE_CACHING, String.valueOf(useCaching));
    return this;
  }

  /** @param level a org.apache.spark.storage.StorageLevel name, e.g. "MEMORY_AND_DISK_SER" */
  public Builder withSimpleIndexInputStorageLevel(String level) {
    hoodieIndexConfig.setValue(SIMPLE_INDEX_INPUT_STORAGE_LEVEL_VALUE, level);
    return this;
  }

  public Builder withGlobalSimpleIndexParallelism(int parallelism) {
    hoodieIndexConfig.setValue(GLOBAL_SIMPLE_INDEX_PARALLELISM, String.valueOf(parallelism));
    return this;
  }

  public Builder withGlobalSimpleIndexUpdatePartitionPath(boolean updatePartitionPath) {
    hoodieIndexConfig.setValue(SIMPLE_INDEX_UPDATE_PARTITION_PATH_ENABLE, String.valueOf(updatePartitionPath));
    return this;
  }

  public Builder withRecordIndexUpdatePartitionPath(boolean updatePartitionPath) {
    hoodieIndexConfig.setValue(RECORD_INDEX_UPDATE_PARTITION_PATH_ENABLE, String.valueOf(updatePartitionPath));
    return this;
  }

  public Builder withGlobalIndexReconcileParallelism(int parallelism) {
    hoodieIndexConfig.setValue(GLOBAL_INDEX_RECONCILE_PARALLELISM, String.valueOf(parallelism));
    return this;
  }

  public Builder withEngineType(EngineType engineType) {
    this.engineType = engineType;
    return this;
  }

  public Builder withBucketNum(String bucketNum) {
    hoodieIndexConfig.setValue(BUCKET_INDEX_NUM_BUCKETS, bucketNum);
    return this;
  }

  public Builder withBucketMaxNum(int bucketMaxNum) {
    hoodieIndexConfig.setValue(BUCKET_INDEX_MAX_NUM_BUCKETS, String.valueOf(bucketMaxNum));
    return this;
  }

  public Builder enableBucketRemotePartitioner(boolean enableRemotePartitioner) {
    hoodieIndexConfig.setValue(BUCKET_PARTITIONER, String.valueOf(enableRemotePartitioner));
    return this;
  }

  public Builder withBucketMinNum(int bucketMinNum) {
    hoodieIndexConfig.setValue(BUCKET_INDEX_MIN_NUM_BUCKETS, String.valueOf(bucketMinNum));
    return this;
  }

  public Builder withIndexKeyField(String keyField) {
    hoodieIndexConfig.setValue(BUCKET_INDEX_HASH_FIELD, keyField);
    return this;
  }

  public Builder withRecordKeyField(String keyField) {
    hoodieIndexConfig.setValue(KeyGeneratorOptions.RECORDKEY_FIELD_NAME, keyField);
    return this;
  }

  public Builder recordIndexUseCaching(boolean useCaching) {
    hoodieIndexConfig.setValue(RECORD_INDEX_USE_CACHING, String.valueOf(useCaching));
    return this;
  }

  /** @param level a org.apache.spark.storage.StorageLevel name, e.g. "MEMORY_AND_DISK_SER" */
  public Builder withRecordIndexInputStorageLevel(String level) {
    hoodieIndexConfig.setValue(RECORD_INDEX_INPUT_STORAGE_LEVEL_VALUE, level);
    return this;
  }

  /**
   * Finalizes the config: applies the engine-specific default index type, fills in
   * remaining class defaults, and validates the index-type and bucket-index settings.
   *
   * @throws IllegalArgumentException if the configured index type is unknown
   * @throws HoodieIndexException if bucket-index configuration is invalid
   */
  public HoodieIndexConfig build() {
    hoodieIndexConfig.setDefaultValue(INDEX_TYPE, getDefaultIndexType(engineType));
    hoodieIndexConfig.setDefaults(HoodieIndexConfig.class.getName());
    // Throws IllegalArgumentException if the value set is not a known Hoodie Index Type
    HoodieIndex.IndexType.valueOf(hoodieIndexConfig.getString(INDEX_TYPE));
    validateBucketIndexConfig();
    return hoodieIndexConfig;
  }

  // Default index type per engine: SIMPLE for Spark/Java, INMEMORY for Flink.
  private String getDefaultIndexType(EngineType engineType) {
    switch (engineType) {
      case SPARK:
      case JAVA:
        return HoodieIndex.IndexType.SIMPLE.name();
      case FLINK:
        return HoodieIndex.IndexType.INMEMORY.name();
      default:
        throw new HoodieNotSupportedException("Unsupported engine " + engineType);
    }
  }

  public EngineType getEngineType() {
    return engineType;
  }

  /**
   * Validates bucket-index settings when the index type is BUCKET:
   * defaults the hash field to the record key (and otherwise requires it to be a
   * subset of the record key fields), requires a positive bucket number, and clamps
   * the configured max/min bucket bounds to the bucket number when inconsistent.
   */
  private void validateBucketIndexConfig() {
    if (hoodieIndexConfig.getString(INDEX_TYPE).equalsIgnoreCase(HoodieIndex.IndexType.BUCKET.toString())) {
      // check the bucket index hash field
      if (StringUtils.isNullOrEmpty(hoodieIndexConfig.getString(BUCKET_INDEX_HASH_FIELD))) {
        hoodieIndexConfig.setValue(BUCKET_INDEX_HASH_FIELD,
            hoodieIndexConfig.getString(KeyGeneratorOptions.RECORDKEY_FIELD_NAME));
      } else {
        boolean valid = Arrays
            .stream(hoodieIndexConfig.getString(KeyGeneratorOptions.RECORDKEY_FIELD_NAME).split(","))
            .collect(Collectors.toSet())
            .containsAll(Arrays.asList(hoodieIndexConfig.getString(BUCKET_INDEX_HASH_FIELD).split(",")));
        if (!valid) {
          throw new HoodieIndexException("Bucket index key (if configured) must be subset of record key.");
        }
      }
      // check the bucket num: the guard rejects zero as well as negatives, so the
      // message must say "positive", not "cannot be negative".
      if (hoodieIndexConfig.getIntOrDefault(BUCKET_INDEX_NUM_BUCKETS) <= 0) {
        throw new HoodieIndexException("When using bucket index, hoodie.bucket.index.num.buckets must be a positive integer.");
      }
      int bucketNum = hoodieIndexConfig.getInt(BUCKET_INDEX_NUM_BUCKETS);
      if (StringUtils.isNullOrEmpty(hoodieIndexConfig.getString(BUCKET_INDEX_MAX_NUM_BUCKETS))) {
        hoodieIndexConfig.setValue(BUCKET_INDEX_MAX_NUM_BUCKETS, Integer.toString(bucketNum));
      } else if (hoodieIndexConfig.getInt(BUCKET_INDEX_MAX_NUM_BUCKETS) < bucketNum) {
        LOG.warn("Maximum bucket number is smaller than bucket number, maximum: {}, bucketNum: {}", hoodieIndexConfig.getInt(BUCKET_INDEX_MAX_NUM_BUCKETS), bucketNum);
        hoodieIndexConfig.setValue(BUCKET_INDEX_MAX_NUM_BUCKETS, Integer.toString(bucketNum));
      }
      if (StringUtils.isNullOrEmpty(hoodieIndexConfig.getString(BUCKET_INDEX_MIN_NUM_BUCKETS))) {
        hoodieIndexConfig.setValue(BUCKET_INDEX_MIN_NUM_BUCKETS, Integer.toString(bucketNum));
      } else if (hoodieIndexConfig.getInt(BUCKET_INDEX_MIN_NUM_BUCKETS) > bucketNum) {
        LOG.warn("Minimum bucket number is larger than the bucket number, minimum: {}, bucketNum: {}", hoodieIndexConfig.getInt(BUCKET_INDEX_MIN_NUM_BUCKETS), bucketNum);
        hoodieIndexConfig.setValue(BUCKET_INDEX_MIN_NUM_BUCKETS, Integer.toString(bucketNum));
      }
    }
  }
}
}
|
googleapis/google-cloud-java | 37,897 | java-alloydb/proto-google-cloud-alloydb-v1/src/main/java/com/google/cloud/alloydb/v1/ClusterOrBuilder.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/alloydb/v1/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.alloydb.v1;
public interface ClusterOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.alloydb.v1.Cluster)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* Output only. Cluster created from backup.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.BackupSource backup_source = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the backupSource field is set.
*/
boolean hasBackupSource();
/**
*
*
* <pre>
* Output only. Cluster created from backup.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.BackupSource backup_source = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The backupSource.
*/
com.google.cloud.alloydb.v1.BackupSource getBackupSource();
/**
*
*
* <pre>
* Output only. Cluster created from backup.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.BackupSource backup_source = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.alloydb.v1.BackupSourceOrBuilder getBackupSourceOrBuilder();
/**
*
*
* <pre>
* Output only. Cluster created via DMS migration.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.MigrationSource migration_source = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the migrationSource field is set.
*/
boolean hasMigrationSource();
/**
*
*
* <pre>
* Output only. Cluster created via DMS migration.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.MigrationSource migration_source = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The migrationSource.
*/
com.google.cloud.alloydb.v1.MigrationSource getMigrationSource();
/**
*
*
* <pre>
* Output only. Cluster created via DMS migration.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.MigrationSource migration_source = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.alloydb.v1.MigrationSourceOrBuilder getMigrationSourceOrBuilder();
/**
*
*
* <pre>
* Output only. Cluster created from CloudSQL snapshot.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.CloudSQLBackupRunSource cloudsql_backup_run_source = 42 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the cloudsqlBackupRunSource field is set.
*/
boolean hasCloudsqlBackupRunSource();
/**
*
*
* <pre>
* Output only. Cluster created from CloudSQL snapshot.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.CloudSQLBackupRunSource cloudsql_backup_run_source = 42 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The cloudsqlBackupRunSource.
*/
com.google.cloud.alloydb.v1.CloudSQLBackupRunSource getCloudsqlBackupRunSource();
/**
*
*
* <pre>
* Output only. Cluster created from CloudSQL snapshot.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.CloudSQLBackupRunSource cloudsql_backup_run_source = 42 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.alloydb.v1.CloudSQLBackupRunSourceOrBuilder
getCloudsqlBackupRunSourceOrBuilder();
/**
*
*
* <pre>
* Output only. The name of the cluster resource with the format:
* * projects/{project}/locations/{region}/clusters/{cluster_id}
* where the cluster ID segment should satisfy the regex expression
* `[a-z0-9-]+`. For more details see https://google.aip.dev/122.
* The prefix of the cluster resource name is the name of the parent resource:
* * projects/{project}/locations/{region}
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The name.
*/
java.lang.String getName();
/**
*
*
* <pre>
* Output only. The name of the cluster resource with the format:
* * projects/{project}/locations/{region}/clusters/{cluster_id}
* where the cluster ID segment should satisfy the regex expression
* `[a-z0-9-]+`. For more details see https://google.aip.dev/122.
* The prefix of the cluster resource name is the name of the parent resource:
* * projects/{project}/locations/{region}
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for name.
*/
com.google.protobuf.ByteString getNameBytes();
/**
*
*
* <pre>
* User-settable and human-readable display name for the Cluster.
* </pre>
*
* <code>string display_name = 2;</code>
*
* @return The displayName.
*/
java.lang.String getDisplayName();
/**
*
*
* <pre>
* User-settable and human-readable display name for the Cluster.
* </pre>
*
* <code>string display_name = 2;</code>
*
* @return The bytes for displayName.
*/
com.google.protobuf.ByteString getDisplayNameBytes();
/**
*
*
* <pre>
* Output only. The system-generated UID of the resource. The UID is assigned
* when the resource is created, and it is retained until it is deleted.
* </pre>
*
* <code>string uid = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The uid.
*/
java.lang.String getUid();
/**
*
*
* <pre>
* Output only. The system-generated UID of the resource. The UID is assigned
* when the resource is created, and it is retained until it is deleted.
* </pre>
*
* <code>string uid = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for uid.
*/
com.google.protobuf.ByteString getUidBytes();
/**
*
*
* <pre>
* Output only. Create time stamp
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the createTime field is set.
*/
boolean hasCreateTime();
/**
*
*
* <pre>
* Output only. Create time stamp
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The createTime.
*/
com.google.protobuf.Timestamp getCreateTime();
/**
*
*
* <pre>
* Output only. Create time stamp
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder();
/**
*
*
* <pre>
* Output only. Update time stamp
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the updateTime field is set.
*/
boolean hasUpdateTime();
/**
*
*
* <pre>
* Output only. Update time stamp
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The updateTime.
*/
com.google.protobuf.Timestamp getUpdateTime();
/**
*
*
* <pre>
* Output only. Update time stamp
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder();
/**
*
*
* <pre>
* Output only. Delete time stamp
* </pre>
*
* <code>.google.protobuf.Timestamp delete_time = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the deleteTime field is set.
*/
boolean hasDeleteTime();
/**
*
*
* <pre>
* Output only. Delete time stamp
* </pre>
*
* <code>.google.protobuf.Timestamp delete_time = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The deleteTime.
*/
com.google.protobuf.Timestamp getDeleteTime();
/**
*
*
* <pre>
* Output only. Delete time stamp
* </pre>
*
* <code>.google.protobuf.Timestamp delete_time = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.protobuf.TimestampOrBuilder getDeleteTimeOrBuilder();
/**
*
*
* <pre>
* Labels as key value pairs
* </pre>
*
* <code>map<string, string> labels = 7;</code>
*/
int getLabelsCount();
/**
*
*
* <pre>
* Labels as key value pairs
* </pre>
*
* <code>map<string, string> labels = 7;</code>
*/
boolean containsLabels(java.lang.String key);
/** Use {@link #getLabelsMap()} instead. */
@java.lang.Deprecated
java.util.Map<java.lang.String, java.lang.String> getLabels();
/**
*
*
* <pre>
* Labels as key value pairs
* </pre>
*
* <code>map<string, string> labels = 7;</code>
*/
java.util.Map<java.lang.String, java.lang.String> getLabelsMap();
/**
*
*
* <pre>
* Labels as key value pairs
* </pre>
*
* <code>map<string, string> labels = 7;</code>
*/
/* nullable */
java.lang.String getLabelsOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue);
/**
*
*
* <pre>
* Labels as key value pairs
* </pre>
*
* <code>map<string, string> labels = 7;</code>
*/
java.lang.String getLabelsOrThrow(java.lang.String key);
/**
*
*
* <pre>
* Output only. The current serving state of the cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.Cluster.State state = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for state.
*/
int getStateValue();
/**
*
*
* <pre>
* Output only. The current serving state of the cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.Cluster.State state = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The state.
*/
com.google.cloud.alloydb.v1.Cluster.State getState();
/**
*
*
* <pre>
* Output only. The type of the cluster. This is an output-only field and it's
* populated at the Cluster creation time or the Cluster promotion
* time. The cluster type is determined by which RPC was used to create
* the cluster (i.e. `CreateCluster` vs. `CreateSecondaryCluster`
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.Cluster.ClusterType cluster_type = 24 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for clusterType.
*/
int getClusterTypeValue();
/**
*
*
* <pre>
* Output only. The type of the cluster. This is an output-only field and it's
* populated at the Cluster creation time or the Cluster promotion
* time. The cluster type is determined by which RPC was used to create
* the cluster (i.e. `CreateCluster` vs. `CreateSecondaryCluster`
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.Cluster.ClusterType cluster_type = 24 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The clusterType.
*/
com.google.cloud.alloydb.v1.Cluster.ClusterType getClusterType();
/**
*
*
* <pre>
* Optional. The database engine major version. This is an optional field and
* it is populated at the Cluster creation time. If a database version is not
* supplied at cluster creation time, then a default database version will
* be used.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.DatabaseVersion database_version = 9 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The enum numeric value on the wire for databaseVersion.
*/
int getDatabaseVersionValue();
/**
*
*
* <pre>
* Optional. The database engine major version. This is an optional field and
* it is populated at the Cluster creation time. If a database version is not
* supplied at cluster creation time, then a default database version will
* be used.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.DatabaseVersion database_version = 9 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The databaseVersion.
*/
com.google.cloud.alloydb.v1.DatabaseVersion getDatabaseVersion();
/**
* <code>
* .google.cloud.alloydb.v1.Cluster.NetworkConfig network_config = 29 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the networkConfig field is set.
*/
boolean hasNetworkConfig();
/**
* <code>
* .google.cloud.alloydb.v1.Cluster.NetworkConfig network_config = 29 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The networkConfig.
*/
com.google.cloud.alloydb.v1.Cluster.NetworkConfig getNetworkConfig();
/**
* <code>
* .google.cloud.alloydb.v1.Cluster.NetworkConfig network_config = 29 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.alloydb.v1.Cluster.NetworkConfigOrBuilder getNetworkConfigOrBuilder();
/**
*
*
* <pre>
* Required. The resource link for the VPC network in which cluster resources
* are created and from which they are accessible via Private IP. The network
* must belong to the same project as the cluster. It is specified in the
* form: `projects/{project}/global/networks/{network_id}`. This is required
* to create a cluster. Deprecated, use network_config.network instead.
* </pre>
*
* <code>
* string network = 10 [deprecated = true, (.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @deprecated google.cloud.alloydb.v1.Cluster.network is deprecated. See
* google/cloud/alloydb/v1/resources.proto;l=674
* @return The network.
*/
@java.lang.Deprecated
java.lang.String getNetwork();
/**
*
*
* <pre>
* Required. The resource link for the VPC network in which cluster resources
* are created and from which they are accessible via Private IP. The network
* must belong to the same project as the cluster. It is specified in the
* form: `projects/{project}/global/networks/{network_id}`. This is required
* to create a cluster. Deprecated, use network_config.network instead.
* </pre>
*
* <code>
* string network = 10 [deprecated = true, (.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @deprecated google.cloud.alloydb.v1.Cluster.network is deprecated. See
* google/cloud/alloydb/v1/resources.proto;l=674
* @return The bytes for network.
*/
@java.lang.Deprecated
com.google.protobuf.ByteString getNetworkBytes();
/**
*
*
* <pre>
* For Resource freshness validation (https://google.aip.dev/154)
* </pre>
*
* <code>string etag = 11;</code>
*
* @return The etag.
*/
java.lang.String getEtag();
/**
*
*
* <pre>
* For Resource freshness validation (https://google.aip.dev/154)
* </pre>
*
* <code>string etag = 11;</code>
*
* @return The bytes for etag.
*/
com.google.protobuf.ByteString getEtagBytes();
/**
*
*
* <pre>
* Annotations to allow client tools to store small amount of arbitrary data.
* This is distinct from labels.
* https://google.aip.dev/128
* </pre>
*
* <code>map<string, string> annotations = 12;</code>
*/
int getAnnotationsCount();
/**
*
*
* <pre>
* Annotations to allow client tools to store small amount of arbitrary data.
* This is distinct from labels.
* https://google.aip.dev/128
* </pre>
*
* <code>map<string, string> annotations = 12;</code>
*/
boolean containsAnnotations(java.lang.String key);
/** Use {@link #getAnnotationsMap()} instead. */
@java.lang.Deprecated
java.util.Map<java.lang.String, java.lang.String> getAnnotations();
/**
*
*
* <pre>
* Annotations to allow client tools to store small amount of arbitrary data.
* This is distinct from labels.
* https://google.aip.dev/128
* </pre>
*
* <code>map<string, string> annotations = 12;</code>
*/
java.util.Map<java.lang.String, java.lang.String> getAnnotationsMap();
/**
*
*
* <pre>
* Annotations to allow client tools to store small amount of arbitrary data.
* This is distinct from labels.
* https://google.aip.dev/128
* </pre>
*
* <code>map<string, string> annotations = 12;</code>
*/
/* nullable */
java.lang.String getAnnotationsOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue);
/**
*
*
* <pre>
* Annotations to allow client tools to store small amount of arbitrary data.
* This is distinct from labels.
* https://google.aip.dev/128
* </pre>
*
* <code>map<string, string> annotations = 12;</code>
*/
java.lang.String getAnnotationsOrThrow(java.lang.String key);
/**
*
*
* <pre>
* Output only. Reconciling (https://google.aip.dev/128#reconciliation).
* Set to true if the current state of Cluster does not match the user's
* intended state, and the service is actively updating the resource to
* reconcile them. This can happen due to user-triggered updates or
* system actions like failover or maintenance.
* </pre>
*
* <code>bool reconciling = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The reconciling.
*/
boolean getReconciling();
/**
*
*
* <pre>
* Input only. Initial user to setup during cluster creation. Required.
* If used in `RestoreCluster` this is ignored.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.UserPassword initial_user = 14 [(.google.api.field_behavior) = INPUT_ONLY];
* </code>
*
* @return Whether the initialUser field is set.
*/
boolean hasInitialUser();
/**
*
*
* <pre>
* Input only. Initial user to setup during cluster creation. Required.
* If used in `RestoreCluster` this is ignored.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.UserPassword initial_user = 14 [(.google.api.field_behavior) = INPUT_ONLY];
* </code>
*
* @return The initialUser.
*/
com.google.cloud.alloydb.v1.UserPassword getInitialUser();
/**
*
*
* <pre>
* Input only. Initial user to setup during cluster creation. Required.
* If used in `RestoreCluster` this is ignored.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.UserPassword initial_user = 14 [(.google.api.field_behavior) = INPUT_ONLY];
* </code>
*/
com.google.cloud.alloydb.v1.UserPasswordOrBuilder getInitialUserOrBuilder();
/**
*
*
* <pre>
* The automated backup policy for this cluster.
*
* If no policy is provided then the default policy will be used. If backups
* are supported for the cluster, the default policy takes one backup a day,
* has a backup window of 1 hour, and retains backups for 14 days.
* For more information on the defaults, consult the
* documentation for the message type.
* </pre>
*
* <code>.google.cloud.alloydb.v1.AutomatedBackupPolicy automated_backup_policy = 17;</code>
*
* @return Whether the automatedBackupPolicy field is set.
*/
boolean hasAutomatedBackupPolicy();
/**
*
*
* <pre>
* The automated backup policy for this cluster.
*
* If no policy is provided then the default policy will be used. If backups
* are supported for the cluster, the default policy takes one backup a day,
* has a backup window of 1 hour, and retains backups for 14 days.
* For more information on the defaults, consult the
* documentation for the message type.
* </pre>
*
* <code>.google.cloud.alloydb.v1.AutomatedBackupPolicy automated_backup_policy = 17;</code>
*
* @return The automatedBackupPolicy.
*/
com.google.cloud.alloydb.v1.AutomatedBackupPolicy getAutomatedBackupPolicy();
/**
*
*
* <pre>
* The automated backup policy for this cluster.
*
* If no policy is provided then the default policy will be used. If backups
* are supported for the cluster, the default policy takes one backup a day,
* has a backup window of 1 hour, and retains backups for 14 days.
* For more information on the defaults, consult the
* documentation for the message type.
* </pre>
*
* <code>.google.cloud.alloydb.v1.AutomatedBackupPolicy automated_backup_policy = 17;</code>
*/
com.google.cloud.alloydb.v1.AutomatedBackupPolicyOrBuilder getAutomatedBackupPolicyOrBuilder();
/**
*
*
* <pre>
* SSL configuration for this AlloyDB cluster.
* </pre>
*
* <code>.google.cloud.alloydb.v1.SslConfig ssl_config = 18 [deprecated = true];</code>
*
* @deprecated google.cloud.alloydb.v1.Cluster.ssl_config is deprecated. See
* google/cloud/alloydb/v1/resources.proto;l=709
* @return Whether the sslConfig field is set.
*/
@java.lang.Deprecated
boolean hasSslConfig();
/**
*
*
* <pre>
* SSL configuration for this AlloyDB cluster.
* </pre>
*
* <code>.google.cloud.alloydb.v1.SslConfig ssl_config = 18 [deprecated = true];</code>
*
* @deprecated google.cloud.alloydb.v1.Cluster.ssl_config is deprecated. See
* google/cloud/alloydb/v1/resources.proto;l=709
* @return The sslConfig.
*/
@java.lang.Deprecated
com.google.cloud.alloydb.v1.SslConfig getSslConfig();
/**
*
*
* <pre>
* SSL configuration for this AlloyDB cluster.
* </pre>
*
* <code>.google.cloud.alloydb.v1.SslConfig ssl_config = 18 [deprecated = true];</code>
*/
@java.lang.Deprecated
com.google.cloud.alloydb.v1.SslConfigOrBuilder getSslConfigOrBuilder();
/**
*
*
* <pre>
* Optional. The encryption config can be specified to encrypt the data disks
* and other persistent data resources of a cluster with a
* customer-managed encryption key (CMEK). When this field is not
* specified, the cluster will then use default encryption scheme to
* protect the user data.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.EncryptionConfig encryption_config = 19 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the encryptionConfig field is set.
*/
boolean hasEncryptionConfig();
/**
*
*
* <pre>
* Optional. The encryption config can be specified to encrypt the data disks
* and other persistent data resources of a cluster with a
* customer-managed encryption key (CMEK). When this field is not
* specified, the cluster will then use default encryption scheme to
* protect the user data.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.EncryptionConfig encryption_config = 19 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The encryptionConfig.
*/
com.google.cloud.alloydb.v1.EncryptionConfig getEncryptionConfig();
/**
*
*
* <pre>
* Optional. The encryption config can be specified to encrypt the data disks
* and other persistent data resources of a cluster with a
* customer-managed encryption key (CMEK). When this field is not
* specified, the cluster will then use default encryption scheme to
* protect the user data.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.EncryptionConfig encryption_config = 19 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.alloydb.v1.EncryptionConfigOrBuilder getEncryptionConfigOrBuilder();
/**
*
*
* <pre>
* Output only. The encryption information for the cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.EncryptionInfo encryption_info = 20 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the encryptionInfo field is set.
*/
boolean hasEncryptionInfo();
/**
*
*
* <pre>
* Output only. The encryption information for the cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.EncryptionInfo encryption_info = 20 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The encryptionInfo.
*/
com.google.cloud.alloydb.v1.EncryptionInfo getEncryptionInfo();
/**
*
*
* <pre>
* Output only. The encryption information for the cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.EncryptionInfo encryption_info = 20 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.alloydb.v1.EncryptionInfoOrBuilder getEncryptionInfoOrBuilder();
/**
*
*
* <pre>
* Optional. Continuous backup configuration for this cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.ContinuousBackupConfig continuous_backup_config = 27 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the continuousBackupConfig field is set.
*/
boolean hasContinuousBackupConfig();
/**
*
*
* <pre>
* Optional. Continuous backup configuration for this cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.ContinuousBackupConfig continuous_backup_config = 27 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The continuousBackupConfig.
*/
com.google.cloud.alloydb.v1.ContinuousBackupConfig getContinuousBackupConfig();
/**
*
*
* <pre>
* Optional. Continuous backup configuration for this cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.ContinuousBackupConfig continuous_backup_config = 27 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.alloydb.v1.ContinuousBackupConfigOrBuilder getContinuousBackupConfigOrBuilder();
/**
*
*
* <pre>
* Output only. Continuous backup properties for this cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.ContinuousBackupInfo continuous_backup_info = 28 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the continuousBackupInfo field is set.
*/
boolean hasContinuousBackupInfo();
/**
*
*
* <pre>
* Output only. Continuous backup properties for this cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.ContinuousBackupInfo continuous_backup_info = 28 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The continuousBackupInfo.
*/
com.google.cloud.alloydb.v1.ContinuousBackupInfo getContinuousBackupInfo();
/**
*
*
* <pre>
* Output only. Continuous backup properties for this cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.ContinuousBackupInfo continuous_backup_info = 28 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.alloydb.v1.ContinuousBackupInfoOrBuilder getContinuousBackupInfoOrBuilder();
/**
*
*
* <pre>
* Cross Region replication config specific to SECONDARY cluster.
* </pre>
*
* <code>.google.cloud.alloydb.v1.Cluster.SecondaryConfig secondary_config = 22;</code>
*
* @return Whether the secondaryConfig field is set.
*/
boolean hasSecondaryConfig();
/**
*
*
* <pre>
* Cross Region replication config specific to SECONDARY cluster.
* </pre>
*
* <code>.google.cloud.alloydb.v1.Cluster.SecondaryConfig secondary_config = 22;</code>
*
* @return The secondaryConfig.
*/
com.google.cloud.alloydb.v1.Cluster.SecondaryConfig getSecondaryConfig();
/**
*
*
* <pre>
* Cross Region replication config specific to SECONDARY cluster.
* </pre>
*
* <code>.google.cloud.alloydb.v1.Cluster.SecondaryConfig secondary_config = 22;</code>
*/
com.google.cloud.alloydb.v1.Cluster.SecondaryConfigOrBuilder getSecondaryConfigOrBuilder();
/**
*
*
* <pre>
* Output only. Cross Region replication config specific to PRIMARY cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.Cluster.PrimaryConfig primary_config = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the primaryConfig field is set.
*/
boolean hasPrimaryConfig();
/**
*
*
* <pre>
* Output only. Cross Region replication config specific to PRIMARY cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.Cluster.PrimaryConfig primary_config = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The primaryConfig.
*/
com.google.cloud.alloydb.v1.Cluster.PrimaryConfig getPrimaryConfig();
/**
*
*
* <pre>
* Output only. Cross Region replication config specific to PRIMARY cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.Cluster.PrimaryConfig primary_config = 23 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.alloydb.v1.Cluster.PrimaryConfigOrBuilder getPrimaryConfigOrBuilder();
/**
*
*
* <pre>
* Output only. Reserved for future use.
* </pre>
*
* <code>bool satisfies_pzs = 30 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The satisfiesPzs.
*/
boolean getSatisfiesPzs();
/**
*
*
* <pre>
* Optional. The configuration for Private Service Connect (PSC) for the
* cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.Cluster.PscConfig psc_config = 31 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the pscConfig field is set.
*/
boolean hasPscConfig();
/**
*
*
* <pre>
* Optional. The configuration for Private Service Connect (PSC) for the
* cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.Cluster.PscConfig psc_config = 31 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The pscConfig.
*/
com.google.cloud.alloydb.v1.Cluster.PscConfig getPscConfig();
/**
*
*
* <pre>
* Optional. The configuration for Private Service Connect (PSC) for the
* cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.Cluster.PscConfig psc_config = 31 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.alloydb.v1.Cluster.PscConfigOrBuilder getPscConfigOrBuilder();
/**
*
*
* <pre>
* Optional. The maintenance update policy determines when to allow or deny
* updates.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.MaintenanceUpdatePolicy maintenance_update_policy = 32 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the maintenanceUpdatePolicy field is set.
*/
boolean hasMaintenanceUpdatePolicy();
/**
*
*
* <pre>
* Optional. The maintenance update policy determines when to allow or deny
* updates.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.MaintenanceUpdatePolicy maintenance_update_policy = 32 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The maintenanceUpdatePolicy.
*/
com.google.cloud.alloydb.v1.MaintenanceUpdatePolicy getMaintenanceUpdatePolicy();
/**
*
*
* <pre>
* Optional. The maintenance update policy determines when to allow or deny
* updates.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.MaintenanceUpdatePolicy maintenance_update_policy = 32 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.alloydb.v1.MaintenanceUpdatePolicyOrBuilder
getMaintenanceUpdatePolicyOrBuilder();
/**
*
*
* <pre>
* Output only. The maintenance schedule for the cluster, generated for a
* specific rollout if a maintenance window is set.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.MaintenanceSchedule maintenance_schedule = 37 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the maintenanceSchedule field is set.
*/
boolean hasMaintenanceSchedule();
/**
*
*
* <pre>
* Output only. The maintenance schedule for the cluster, generated for a
* specific rollout if a maintenance window is set.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.MaintenanceSchedule maintenance_schedule = 37 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The maintenanceSchedule.
*/
com.google.cloud.alloydb.v1.MaintenanceSchedule getMaintenanceSchedule();
/**
*
*
* <pre>
* Output only. The maintenance schedule for the cluster, generated for a
* specific rollout if a maintenance window is set.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.MaintenanceSchedule maintenance_schedule = 37 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.alloydb.v1.MaintenanceScheduleOrBuilder getMaintenanceScheduleOrBuilder();
/**
*
*
* <pre>
* Optional. Subscription type of the cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.SubscriptionType subscription_type = 38 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The enum numeric value on the wire for subscriptionType.
*/
int getSubscriptionTypeValue();
/**
*
*
* <pre>
* Optional. Subscription type of the cluster.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.SubscriptionType subscription_type = 38 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The subscriptionType.
*/
com.google.cloud.alloydb.v1.SubscriptionType getSubscriptionType();
/**
*
*
* <pre>
* Output only. Metadata for free trial clusters
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.Cluster.TrialMetadata trial_metadata = 39 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the trialMetadata field is set.
*/
boolean hasTrialMetadata();
/**
*
*
* <pre>
* Output only. Metadata for free trial clusters
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.Cluster.TrialMetadata trial_metadata = 39 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The trialMetadata.
*/
com.google.cloud.alloydb.v1.Cluster.TrialMetadata getTrialMetadata();
/**
*
*
* <pre>
* Output only. Metadata for free trial clusters
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.Cluster.TrialMetadata trial_metadata = 39 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.cloud.alloydb.v1.Cluster.TrialMetadataOrBuilder getTrialMetadataOrBuilder();
/**
*
*
* <pre>
* Optional. Input only. Immutable. Tag keys/values directly bound to this
* resource. For example:
* ```
* "123/environment": "production",
* "123/costCenter": "marketing"
* ```
* </pre>
*
* <code>
* map<string, string> tags = 41 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE, (.google.api.field_behavior) = OPTIONAL];
* </code>
*/
int getTagsCount();
/**
*
*
* <pre>
* Optional. Input only. Immutable. Tag keys/values directly bound to this
* resource. For example:
* ```
* "123/environment": "production",
* "123/costCenter": "marketing"
* ```
* </pre>
*
* <code>
* map<string, string> tags = 41 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE, (.google.api.field_behavior) = OPTIONAL];
* </code>
*/
boolean containsTags(java.lang.String key);
/** Use {@link #getTagsMap()} instead. */
@java.lang.Deprecated
java.util.Map<java.lang.String, java.lang.String> getTags();
/**
*
*
* <pre>
* Optional. Input only. Immutable. Tag keys/values directly bound to this
* resource. For example:
* ```
* "123/environment": "production",
* "123/costCenter": "marketing"
* ```
* </pre>
*
* <code>
* map<string, string> tags = 41 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE, (.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.Map<java.lang.String, java.lang.String> getTagsMap();
/**
*
*
* <pre>
* Optional. Input only. Immutable. Tag keys/values directly bound to this
* resource. For example:
* ```
* "123/environment": "production",
* "123/costCenter": "marketing"
* ```
* </pre>
*
* <code>
* map<string, string> tags = 41 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE, (.google.api.field_behavior) = OPTIONAL];
* </code>
*/
/* nullable */
java.lang.String getTagsOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue);
/**
*
*
* <pre>
* Optional. Input only. Immutable. Tag keys/values directly bound to this
* resource. For example:
* ```
* "123/environment": "production",
* "123/costCenter": "marketing"
* ```
* </pre>
*
* <code>
* map<string, string> tags = 41 [(.google.api.field_behavior) = INPUT_ONLY, (.google.api.field_behavior) = IMMUTABLE, (.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.lang.String getTagsOrThrow(java.lang.String key);
com.google.cloud.alloydb.v1.Cluster.SourceCase getSourceCase();
}
|
apache/iotdb | 38,486 | iotdb-core/datanode/src/main/java/org/apache/iotdb/db/pipe/processor/aggregate/AggregateProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.pipe.processor.aggregate;
import org.apache.iotdb.commons.conf.CommonDescriptor;
import org.apache.iotdb.commons.consensus.DataRegionId;
import org.apache.iotdb.commons.consensus.index.ProgressIndex;
import org.apache.iotdb.commons.consensus.index.impl.MinimumProgressIndex;
import org.apache.iotdb.commons.consensus.index.impl.TimeWindowStateProgressIndex;
import org.apache.iotdb.commons.exception.IllegalPathException;
import org.apache.iotdb.commons.pipe.agent.task.meta.PipeTaskMeta;
import org.apache.iotdb.commons.pipe.config.plugin.env.PipeTaskProcessorRuntimeEnvironment;
import org.apache.iotdb.commons.pipe.event.EnrichedEvent;
import org.apache.iotdb.commons.utils.PathUtils;
import org.apache.iotdb.db.pipe.agent.PipeDataNodeAgent;
import org.apache.iotdb.db.pipe.agent.plugin.dataregion.PipeDataRegionPluginAgent;
import org.apache.iotdb.db.pipe.event.common.row.PipeResetTabletRow;
import org.apache.iotdb.db.pipe.event.common.row.PipeRow;
import org.apache.iotdb.db.pipe.event.common.row.PipeRowCollector;
import org.apache.iotdb.db.pipe.event.common.tablet.PipeInsertNodeTabletInsertionEvent;
import org.apache.iotdb.db.pipe.event.common.tablet.PipeRawTabletInsertionEvent;
import org.apache.iotdb.db.pipe.event.common.tsfile.PipeTsFileInsertionEvent;
import org.apache.iotdb.db.pipe.processor.aggregate.operator.aggregatedresult.AggregatedResultOperator;
import org.apache.iotdb.db.pipe.processor.aggregate.operator.intermediateresult.IntermediateResultOperator;
import org.apache.iotdb.db.pipe.processor.aggregate.operator.processor.AbstractOperatorProcessor;
import org.apache.iotdb.db.pipe.processor.aggregate.window.datastructure.WindowOutput;
import org.apache.iotdb.db.pipe.processor.aggregate.window.processor.AbstractWindowingProcessor;
import org.apache.iotdb.db.queryengine.transformation.dag.udf.UDFParametersFactory;
import org.apache.iotdb.db.storageengine.StorageEngine;
import org.apache.iotdb.pipe.api.PipeProcessor;
import org.apache.iotdb.pipe.api.access.Row;
import org.apache.iotdb.pipe.api.annotation.TreeModel;
import org.apache.iotdb.pipe.api.collector.EventCollector;
import org.apache.iotdb.pipe.api.collector.RowCollector;
import org.apache.iotdb.pipe.api.customizer.configuration.PipeProcessorRuntimeConfiguration;
import org.apache.iotdb.pipe.api.customizer.configuration.PipeRuntimeEnvironment;
import org.apache.iotdb.pipe.api.customizer.parameter.PipeParameterValidator;
import org.apache.iotdb.pipe.api.customizer.parameter.PipeParameters;
import org.apache.iotdb.pipe.api.event.Event;
import org.apache.iotdb.pipe.api.event.dml.insertion.TabletInsertionEvent;
import org.apache.iotdb.pipe.api.event.dml.insertion.TsFileInsertionEvent;
import org.apache.iotdb.pipe.api.exception.PipeException;
import org.apache.tsfile.common.conf.TSFileConfig;
import org.apache.tsfile.common.constant.TsFileConstant;
import org.apache.tsfile.enums.TSDataType;
import org.apache.tsfile.utils.Binary;
import org.apache.tsfile.utils.BitMap;
import org.apache.tsfile.utils.Pair;
import org.apache.tsfile.write.schema.MeasurementSchema;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import static org.apache.iotdb.commons.pipe.config.constant.PipeProcessorConstant.PROCESSOR_OPERATORS_DEFAULT_VALUE;
import static org.apache.iotdb.commons.pipe.config.constant.PipeProcessorConstant.PROCESSOR_OPERATORS_KEY;
import static org.apache.iotdb.commons.pipe.config.constant.PipeProcessorConstant.PROCESSOR_OUTPUT_DATABASE_DEFAULT_VALUE;
import static org.apache.iotdb.commons.pipe.config.constant.PipeProcessorConstant.PROCESSOR_OUTPUT_DATABASE_KEY;
import static org.apache.iotdb.commons.pipe.config.constant.PipeProcessorConstant.PROCESSOR_OUTPUT_MAX_DELAY_SECONDS_DEFAULT_VALUE;
import static org.apache.iotdb.commons.pipe.config.constant.PipeProcessorConstant.PROCESSOR_OUTPUT_MAX_DELAY_SECONDS_KEY;
import static org.apache.iotdb.commons.pipe.config.constant.PipeProcessorConstant.PROCESSOR_OUTPUT_MEASUREMENTS_DEFAULT_VALUE;
import static org.apache.iotdb.commons.pipe.config.constant.PipeProcessorConstant.PROCESSOR_OUTPUT_MEASUREMENTS_KEY;
import static org.apache.iotdb.commons.pipe.config.constant.PipeProcessorConstant.PROCESSOR_OUTPUT_MIN_REPORT_INTERVAL_SECONDS_DEFAULT_VALUE;
import static org.apache.iotdb.commons.pipe.config.constant.PipeProcessorConstant.PROCESSOR_OUTPUT_MIN_REPORT_INTERVAL_SECONDS_KEY;
import static org.apache.iotdb.commons.pipe.config.constant.PipeProcessorConstant.PROCESSOR_WINDOWING_STRATEGY_DEFAULT_VALUE;
import static org.apache.iotdb.commons.pipe.config.constant.PipeProcessorConstant.PROCESSOR_WINDOWING_STRATEGY_KEY;
/**
 * {@link AggregateProcessor} is a {@link PipeProcessor} that can adopt different implementations of
 * {@link AbstractWindowingProcessor} as windowing strategy and use calculation methods from all the
 * {@link AbstractOperatorProcessor}s to calculate the given operators. Both the {@link
 * AbstractWindowingProcessor} and {@link AbstractOperatorProcessor} can be implemented by user and
 * loaded as a normal {@link PipeProcessor}.
 *
 * <p>Per-pipe runtime state is kept in static maps keyed by pipe name so that multiple processor
 * instances (one per data region) of the same pipe share windows; reference counting in
 * {@link #customize} / {@link #close} governs the shared state's lifetime.
 */
@TreeModel
public class AggregateProcessor implements PipeProcessor {
  private static final String WINDOWING_PROCESSOR_SUFFIX = "-windowing-processor";
  private String pipeName;
  private String databaseWithPathSeparator;
  private PipeTaskMeta pipeTaskMeta;
  private long outputMaxDelayMilliseconds;
  private long outputMinReportIntervalMilliseconds;
  private String outputDatabaseWithPathSeparator;
  // Output column name -> aggregated result operator (stateless, shared per instance)
  private final Map<String, AggregatedResultOperator> outputName2OperatorMap = new HashMap<>();
  // Intermediate result name -> supplier producing a fresh (stateful) operator per timeSeries
  private final Map<String, Supplier<IntermediateResultOperator>>
      intermediateResultName2OperatorSupplierMap = new HashMap<>();
  private final Map<String, String> systemParameters = new HashMap<>();
  // Shared across all instances of the same pipe; guarded by reference counting in close()
  private static final Map<String, Integer> pipeName2referenceCountMap = new ConcurrentHashMap<>();
  private static final Map<String, AtomicLong> pipeName2LastValueReceiveTimeMap =
      new ConcurrentHashMap<>();
  private static final ConcurrentMap<
          String, ConcurrentMap<String, AtomicReference<TimeSeriesRuntimeState>>>
      pipeName2timeSeries2TimeSeriesRuntimeStateMap = new ConcurrentHashMap<>();
  private AbstractWindowingProcessor windowingProcessor;
  private final List<AbstractOperatorProcessor> operatorProcessors = new ArrayList<>();
  // Static values, calculated on initialization
  private String[] columnNameStringList;
  private String dataBaseName;
  private Boolean isTableModel;

  /**
   * Validates that the operator list and windowing strategy are non-empty, that the optional
   * output database starts with "root.", and that every configured output measurement is a legal
   * single-level path segment.
   */
  @Override
  public void validate(final PipeParameterValidator validator) throws Exception {
    final PipeParameters parameters = validator.getParameters();
    validator
        .validate(
            arg -> !((String) arg).isEmpty(),
            String.format("The parameter %s must not be empty.", PROCESSOR_OPERATORS_KEY),
            parameters.getStringOrDefault(
                PROCESSOR_OPERATORS_KEY, PROCESSOR_OPERATORS_DEFAULT_VALUE))
        .validate(
            arg -> !((String) arg).isEmpty(),
            String.format("The parameter %s must not be empty.", PROCESSOR_WINDOWING_STRATEGY_KEY),
            parameters.getStringOrDefault(
                PROCESSOR_WINDOWING_STRATEGY_KEY, PROCESSOR_WINDOWING_STRATEGY_DEFAULT_VALUE))
        .validate(
            arg -> ((String) arg).isEmpty() || ((String) arg).startsWith("root."),
            String.format(
                "The output database %s shall start with root.",
                parameters.getStringOrDefault(
                    PROCESSOR_OUTPUT_DATABASE_KEY, PROCESSOR_OUTPUT_DATABASE_DEFAULT_VALUE)),
            parameters.getStringOrDefault(
                PROCESSOR_OUTPUT_DATABASE_KEY, PROCESSOR_OUTPUT_DATABASE_DEFAULT_VALUE))
        .validate(
            arg ->
                ((String) arg).isEmpty()
                    || Arrays.stream(((String) arg).replace(" ", "").split(","))
                        .allMatch(this::isLegalMeasurement),
            String.format(
                "The output measurements %s contains illegal measurements, the measurements must be the last level of a legal path",
                parameters.getStringOrDefault(
                    PROCESSOR_OUTPUT_MEASUREMENTS_KEY,
                    PROCESSOR_OUTPUT_MEASUREMENTS_DEFAULT_VALUE)),
            parameters.getStringOrDefault(
                PROCESSOR_OUTPUT_MEASUREMENTS_KEY, PROCESSOR_OUTPUT_MEASUREMENTS_DEFAULT_VALUE));
  }

  /**
   * A measurement is legal iff "root." + measurement parses as a path AND it is a single level:
   * either fully back-quoted, or containing no un-quoted dot.
   */
  private boolean isLegalMeasurement(final String measurement) {
    try {
      PathUtils.isLegalPath("root." + measurement);
    } catch (final IllegalPathException e) {
      return false;
    }
    return (measurement.startsWith("`") && measurement.endsWith("`"))
        || !measurement.contains(".");
  }

  /**
   * Configures this processor instance: resolves the owning database, loads the aggregator and
   * intermediate-result operators from all {@link AbstractOperatorProcessor} plugins, instantiates
   * the windowing processor, and restores window state from the task's
   * {@link TimeWindowStateProgressIndex} if one was previously persisted.
   *
   * @throws PipeException if a configured aggregator/output name cannot be resolved, a declared
   *     intermediate value has no supplier, the windowing plugin is of the wrong type, or the
   *     persisted progress index has an unsupported type
   */
  @Override
  public void customize(
      final PipeParameters parameters, final PipeProcessorRuntimeConfiguration configuration)
      throws Exception {
    final PipeRuntimeEnvironment environment = configuration.getRuntimeEnvironment();
    pipeName = environment.getPipeName();
    dataBaseName =
        StorageEngine.getInstance()
            .getDataRegion(new DataRegionId(environment.getRegionId()))
            .getDatabaseName();
    if (dataBaseName != null) {
      isTableModel = PathUtils.isTableModelDatabase(dataBaseName);
    }
    pipeName2referenceCountMap.compute(
        pipeName, (name, count) -> Objects.nonNull(count) ? count + 1 : 1);
    pipeName2timeSeries2TimeSeriesRuntimeStateMap.putIfAbsent(pipeName, new ConcurrentHashMap<>());
    databaseWithPathSeparator =
        StorageEngine.getInstance()
                .getDataRegion(
                    new DataRegionId(configuration.getRuntimeEnvironment().getRegionId()))
                .getDatabaseName()
            + TsFileConstant.PATH_SEPARATOR;
    pipeTaskMeta =
        ((PipeTaskProcessorRuntimeEnvironment) configuration.getRuntimeEnvironment())
            .getPipeTaskMeta();
    // Load parameters
    final long outputMaxDelaySeconds =
        parameters.getLongOrDefault(
            PROCESSOR_OUTPUT_MAX_DELAY_SECONDS_KEY,
            PROCESSOR_OUTPUT_MAX_DELAY_SECONDS_DEFAULT_VALUE);
    // The output max delay milliseconds must be set to at least 1
    // to guarantee the correctness of the CAS in last receive time
    outputMaxDelayMilliseconds =
        outputMaxDelaySeconds < 0 ? Long.MAX_VALUE : Math.max(outputMaxDelaySeconds * 1000, 1);
    outputMinReportIntervalMilliseconds =
        parameters.getLongOrDefault(
                PROCESSOR_OUTPUT_MIN_REPORT_INTERVAL_SECONDS_KEY,
                PROCESSOR_OUTPUT_MIN_REPORT_INTERVAL_SECONDS_DEFAULT_VALUE)
            * 1000;
    final String outputDatabase =
        parameters.getStringOrDefault(
            PROCESSOR_OUTPUT_DATABASE_KEY, PROCESSOR_OUTPUT_DATABASE_DEFAULT_VALUE);
    outputDatabaseWithPathSeparator =
        outputDatabase.isEmpty() ? outputDatabase : outputDatabase + TsFileConstant.PATH_SEPARATOR;
    // Set output name: the i-th operator maps to the i-th configured measurement name,
    // falling back to the operator's own name when fewer measurements are configured
    final List<String> operatorNameList =
        Arrays.stream(
                parameters
                    .getStringOrDefault(PROCESSOR_OPERATORS_KEY, PROCESSOR_OPERATORS_DEFAULT_VALUE)
                    .replace(" ", "")
                    .split(","))
            .collect(Collectors.toList());
    final String outputMeasurementString =
        parameters.getStringOrDefault(
            PROCESSOR_OUTPUT_MEASUREMENTS_KEY, PROCESSOR_OUTPUT_MEASUREMENTS_DEFAULT_VALUE);
    final List<String> outputMeasurementNameList =
        outputMeasurementString.isEmpty()
            ? Collections.emptyList()
            : Arrays.stream(outputMeasurementString.replace(" ", "").split(","))
                .collect(Collectors.toList());
    final Map<String, String> aggregatorName2OutputNameMap = new HashMap<>();
    for (int i = 0; i < operatorNameList.size(); ++i) {
      if (i < outputMeasurementNameList.size()) {
        aggregatorName2OutputNameMap.put(
            operatorNameList.get(i).toLowerCase(), outputMeasurementNameList.get(i));
      } else {
        aggregatorName2OutputNameMap.put(
            operatorNameList.get(i).toLowerCase(), operatorNameList.get(i));
      }
    }
    // Load the useful aggregators' and their corresponding intermediate results' computational
    // logic.
    final Set<String> declaredIntermediateResultSet = new HashSet<>();
    final PipeDataRegionPluginAgent agent = PipeDataNodeAgent.plugin().dataRegion();
    for (final String pipePluginName :
        agent.getSubProcessorNamesWithSpecifiedParent(AbstractOperatorProcessor.class)) {
      // Children are allowed to validate and configure the computational logic
      // from the same parameters other than processor name
      final AbstractOperatorProcessor operatorProcessor =
          (AbstractOperatorProcessor)
              agent.getConfiguredProcessor(pipePluginName, parameters, configuration);
      operatorProcessor.getAggregatorOperatorSet().stream()
          .filter(
              operator ->
                  aggregatorName2OutputNameMap.containsKey(operator.getName().toLowerCase()))
          .forEach(
              operator -> {
                outputName2OperatorMap.put(
                    aggregatorName2OutputNameMap.get(operator.getName().toLowerCase()), operator);
                declaredIntermediateResultSet.addAll(operator.getDeclaredIntermediateValueNames());
              });
      operatorProcessor
          .getIntermediateResultOperatorSupplierSet()
          .forEach(
              supplier ->
                  intermediateResultName2OperatorSupplierMap.put(
                      supplier.get().getName(), supplier));
      operatorProcessors.add(operatorProcessor);
    }
    // Any configured aggregator without a matching operator is a configuration error
    aggregatorName2OutputNameMap
        .entrySet()
        .removeIf(entry -> outputName2OperatorMap.containsKey(entry.getValue()));
    if (!aggregatorName2OutputNameMap.isEmpty()) {
      throw new PipeException(
          String.format(
              "The aggregator and output name %s is invalid.", aggregatorName2OutputNameMap));
    }
    // Keep only the intermediate suppliers actually declared by selected aggregators,
    // and fail if any declared intermediate value has no supplier
    intermediateResultName2OperatorSupplierMap.keySet().retainAll(declaredIntermediateResultSet);
    declaredIntermediateResultSet.removeAll(intermediateResultName2OperatorSupplierMap.keySet());
    if (!declaredIntermediateResultSet.isEmpty()) {
      throw new PipeException(
          String.format(
              "The needed intermediate values %s are not defined.", declaredIntermediateResultSet));
    }
    // Set up column name strings
    columnNameStringList = new String[outputName2OperatorMap.size()];
    final List<String> operatorNames = new ArrayList<>(outputName2OperatorMap.keySet());
    for (int i = 0; i < outputName2OperatorMap.size(); ++i) {
      columnNameStringList[i] = operatorNames.get(i);
    }
    // Get windowing processor
    final String processorName =
        parameters.getStringOrDefault(
                PROCESSOR_WINDOWING_STRATEGY_KEY, PROCESSOR_WINDOWING_STRATEGY_DEFAULT_VALUE)
            + WINDOWING_PROCESSOR_SUFFIX;
    final PipeProcessor windowProcessor =
        agent.getConfiguredProcessor(processorName, parameters, configuration);
    if (!(windowProcessor instanceof AbstractWindowingProcessor)) {
      throw new PipeException(
          String.format("The processor %s is not a windowing processor.", processorName));
    }
    windowingProcessor = (AbstractWindowingProcessor) windowProcessor;
    // Configure system parameters
    systemParameters.put(
        UDFParametersFactory.TIMESTAMP_PRECISION,
        CommonDescriptor.getInstance().getConfig().getTimestampPrecision());
    // The aggregated result operators can be configured here because they are global
    // and stateless, needing only one configuration
    this.outputName2OperatorMap
        .values()
        .forEach(operator -> operator.configureSystemParameters(systemParameters));
    // Restore window state
    final ProgressIndex index = pipeTaskMeta.getProgressIndex();
    if (index == MinimumProgressIndex.INSTANCE) {
      // Nothing persisted yet, start from empty state
      return;
    }
    if (!(index instanceof TimeWindowStateProgressIndex)) {
      throw new PipeException(
          String.format(
              "The aggregate processor does not support progressIndexType %s", index.getType()));
    }
    final TimeWindowStateProgressIndex timeWindowStateProgressIndex =
        (TimeWindowStateProgressIndex) index;
    for (final Map.Entry<String, Pair<Long, ByteBuffer>> entry :
        timeWindowStateProgressIndex.getTimeSeries2TimestampWindowBufferPairMap().entrySet()) {
      final AtomicReference<TimeSeriesRuntimeState> stateReference =
          pipeName2timeSeries2TimeSeriesRuntimeStateMap
              .get(pipeName)
              .computeIfAbsent(
                  entry.getKey(),
                  key ->
                      new AtomicReference<>(
                          new TimeSeriesRuntimeState(
                              outputName2OperatorMap,
                              intermediateResultName2OperatorSupplierMap,
                              systemParameters,
                              windowingProcessor)));
      synchronized (stateReference) {
        try {
          stateReference.get().restoreTimestampAndWindows(entry.getValue());
        } catch (final IOException e) {
          throw new PipeException("Encountered exception when deserializing from PipeTaskMeta", e);
        }
      }
    }
  }

  /**
   * Feeds every row of a tablet event into the per-timeSeries windows, binds the merged
   * {@link TimeWindowStateProgressIndex} to the source event, then collects the aggregated output
   * events. Non-pipe tablet events are forwarded unchanged.
   */
  @Override
  public void process(
      final TabletInsertionEvent tabletInsertionEvent, final EventCollector eventCollector)
      throws Exception {
    if (!(tabletInsertionEvent instanceof PipeInsertNodeTabletInsertionEvent)
        && !(tabletInsertionEvent instanceof PipeRawTabletInsertionEvent)) {
      eventCollector.collect(tabletInsertionEvent);
      return;
    }
    pipeName2LastValueReceiveTimeMap
        .computeIfAbsent(pipeName, key -> new AtomicLong(System.currentTimeMillis()))
        .set(System.currentTimeMillis());
    final AtomicReference<Exception> exception = new AtomicReference<>();
    final TimeWindowStateProgressIndex[] progressIndex = {
      new TimeWindowStateProgressIndex(new ConcurrentHashMap<>())
    };
    final Iterable<TabletInsertionEvent> outputEvents =
        tabletInsertionEvent.processRowByRow(
            (row, rowCollector) ->
                progressIndex[0] =
                    (TimeWindowStateProgressIndex)
                        progressIndex[0].updateToMinimumEqualOrIsAfterProgressIndex(
                            new TimeWindowStateProgressIndex(
                                processRow(row, rowCollector, exception))));
    // Must reset progressIndex before collection
    ((EnrichedEvent) tabletInsertionEvent).bindProgressIndex(progressIndex[0]);
    outputEvents.forEach(
        event -> {
          try {
            eventCollector.collect(event);
          } catch (Exception e) {
            exception.set(e);
          }
        });
    if (Objects.nonNull(exception.get())) {
      throw exception.get();
    }
  }

  /**
   * Updates the windows of every non-null column in the given row and collects any completed
   * window outputs via {@code rowCollector}.
   *
   * @return timeSeries -> (timestamp, serialized window state) pairs for windows whose state
   *     should be persisted into the progress index
   */
  private Map<String, Pair<Long, ByteBuffer>> processRow(
      final Row row, final RowCollector rowCollector, final AtomicReference<Exception> exception) {
    final Map<String, Pair<Long, ByteBuffer>> resultMap = new HashMap<>();
    final long timestamp = row.getTime();
    for (int index = 0, size = row.size(); index < size; ++index) {
      // Do not calculate null values
      if (row.isNull(index)) {
        continue;
      }
      // All the timeSeries we stored are without database here if the parameters "outputDatabase"
      // is configured, because we do not support the same timeSeries (all the same except database)
      // in that mode, without the database we can save space and prevent string replacing problems.
      final String timeSeries =
          (outputDatabaseWithPathSeparator.isEmpty()
                  ? row.getDeviceId()
                  : row.getDeviceId().replaceFirst(databaseWithPathSeparator, ""))
              + TsFileConstant.PATH_SEPARATOR
              + row.getColumnName(index);
      final AtomicReference<TimeSeriesRuntimeState> stateReference =
          pipeName2timeSeries2TimeSeriesRuntimeStateMap
              .get(pipeName)
              .computeIfAbsent(
                  timeSeries,
                  key ->
                      new AtomicReference<>(
                          new TimeSeriesRuntimeState(
                              outputName2OperatorMap,
                              intermediateResultName2OperatorSupplierMap,
                              systemParameters,
                              windowingProcessor)));
      final Pair<List<WindowOutput>, Pair<Long, ByteBuffer>> result;
      synchronized (stateReference) {
        final TimeSeriesRuntimeState state = stateReference.get();
        try {
          switch (row.getDataType(index)) {
            case BOOLEAN:
              result =
                  state.updateWindows(
                      timestamp, row.getBoolean(index), outputMinReportIntervalMilliseconds);
              break;
            case INT32:
              result =
                  state.updateWindows(
                      timestamp, row.getInt(index), outputMinReportIntervalMilliseconds);
              break;
            case DATE:
              result =
                  state.updateWindows(
                      timestamp, row.getDate(index), outputMinReportIntervalMilliseconds);
              break;
            case INT64:
            case TIMESTAMP:
              result =
                  state.updateWindows(
                      timestamp, row.getLong(index), outputMinReportIntervalMilliseconds);
              break;
            case FLOAT:
              result =
                  state.updateWindows(
                      timestamp, row.getFloat(index), outputMinReportIntervalMilliseconds);
              break;
            case DOUBLE:
              result =
                  state.updateWindows(
                      timestamp, row.getDouble(index), outputMinReportIntervalMilliseconds);
              break;
            case TEXT:
            case STRING:
              result =
                  state.updateWindows(
                      timestamp, row.getString(index), outputMinReportIntervalMilliseconds);
              break;
            case BLOB:
              result =
                  state.updateWindows(
                      timestamp, row.getBinary(index), outputMinReportIntervalMilliseconds);
              break;
            default:
              throw new UnsupportedOperationException(
                  String.format("The type %s is not supported", row.getDataType(index)));
          }
          if (Objects.nonNull(result)) {
            collectWindowOutputs(result.getLeft(), timeSeries, rowCollector);
            if (Objects.nonNull(result.getRight())) {
              resultMap.put(timeSeries, result.getRight());
            }
          }
        } catch (final IOException | UnsupportedOperationException e) {
          // Recorded here and rethrown by the caller; the remaining columns are still processed
          exception.set(e);
        }
      }
    }
    return resultMap;
  }

  /**
   * Decomposes a TsFile event into tablet events and processes each through
   * {@link #process(TabletInsertionEvent, EventCollector)}.
   */
  @Override
  public void process(
      final TsFileInsertionEvent tsFileInsertionEvent, final EventCollector eventCollector)
      throws Exception {
    try {
      if (tsFileInsertionEvent instanceof PipeTsFileInsertionEvent) {
        final AtomicReference<Exception> ex = new AtomicReference<>();
        ((PipeTsFileInsertionEvent) tsFileInsertionEvent)
            .consumeTabletInsertionEventsWithRetry(
                event -> {
                  try {
                    process(event, eventCollector);
                  } catch (Exception e) {
                    ex.set(e);
                  }
                },
                "AggregateProcessor::process");
        if (ex.get() != null) {
          throw ex.get();
        }
      } else {
        for (final TabletInsertionEvent tabletInsertionEvent :
            tsFileInsertionEvent.toTabletInsertionEvents()) {
          process(tabletInsertionEvent, eventCollector);
        }
      }
    } finally {
      tsFileInsertionEvent.close();
    }
    // The timeProgressIndex shall only be reported by the output events
    // whose progressIndex is bounded with tablet events
    if (tsFileInsertionEvent instanceof PipeTsFileInsertionEvent) {
      ((PipeTsFileInsertionEvent) tsFileInsertionEvent).skipReportOnCommit();
    }
  }

  /**
   * Handles generic (e.g. heartbeat) events. If no value has arrived for longer than the configured
   * max output delay, forces all windows of this pipe to emit their current results, then disables
   * further forced emission until new data arrives.
   */
  @Override
  public void process(final Event event, final EventCollector eventCollector) throws Exception {
    final AtomicLong lastReceiveTime =
        pipeName2LastValueReceiveTimeMap.computeIfAbsent(
            pipeName, key -> new AtomicLong(System.currentTimeMillis()));
    final long previousTime = lastReceiveTime.get();
    if (System.currentTimeMillis() - previousTime > outputMaxDelayMilliseconds) {
      final AtomicReference<Exception> exception = new AtomicReference<>();
      pipeName2timeSeries2TimeSeriesRuntimeStateMap
          .get(pipeName)
          .keySet()
          .forEach(
              timeSeries -> {
                final AtomicReference<TimeSeriesRuntimeState> stateReference =
                    pipeName2timeSeries2TimeSeriesRuntimeStateMap.get(pipeName).get(timeSeries);
                synchronized (stateReference) {
                  final PipeRowCollector rowCollector =
                      new PipeRowCollector(pipeTaskMeta, null, dataBaseName, isTableModel);
                  try {
                    collectWindowOutputs(
                        stateReference.get().forceOutput(), timeSeries, rowCollector);
                  } catch (final IOException e) {
                    exception.set(e);
                  }
                  rowCollector
                      .convertToTabletInsertionEvents(false)
                      .forEach(
                          tabletEvent -> {
                            try {
                              eventCollector.collect(tabletEvent);
                            } catch (Exception e) {
                              exception.set(e);
                            }
                          });
                }
              });
      if (exception.get() != null) {
        // Retry at the fixed interval
        lastReceiveTime.set(System.currentTimeMillis());
        throw exception.get();
      }
      // Forbidding emitting results until next data comes
      // If the last receive time has changed, it means new data has come
      // thus the next output is needed
      lastReceiveTime.compareAndSet(previousTime, Long.MAX_VALUE);
    }
    eventCollector.collect(event);
  }

  /**
   * Collect {@link WindowOutput}s of a single timeSeries in one turn. The {@link TSDataType}s shall
   * be the same because the {@link AggregatedResultOperator}s shall return the same value for the
   * same timeSeries.
   *
   * @param outputs the {@link WindowOutput} output
   * @param timeSeries the timeSeries' name
   * @param collector {@link RowCollector}
   */
  public void collectWindowOutputs(
      final List<WindowOutput> outputs, final String timeSeries, final RowCollector collector)
      throws IOException {
    if (Objects.isNull(outputs) || outputs.isEmpty()) {
      return;
    }
    // Sort and same timestamps removal
    outputs.sort(Comparator.comparingLong(WindowOutput::getTimestamp));
    final AtomicLong lastValue = new AtomicLong(Long.MIN_VALUE);
    final List<WindowOutput> distinctOutputs = new ArrayList<>();
    outputs.forEach(
        output -> {
          final long timeStamp = output.getTimestamp();
          if (timeStamp != lastValue.get()) {
            lastValue.set(timeStamp);
            distinctOutputs.add(output);
          }
        });
    final MeasurementSchema[] measurementSchemaList =
        new MeasurementSchema[columnNameStringList.length];
    final TSDataType[] valueColumnTypes = new TSDataType[columnNameStringList.length];
    final Object[] valueColumns = new Object[columnNameStringList.length];
    final BitMap[] bitMaps = new BitMap[columnNameStringList.length];
    // Setup timestamps
    final long[] timestampColumn = new long[distinctOutputs.size()];
    for (int i = 0; i < distinctOutputs.size(); ++i) {
      timestampColumn[i] = distinctOutputs.get(i).getTimestamp();
    }
    for (int columnIndex = 0; columnIndex < columnNameStringList.length; ++columnIndex) {
      bitMaps[columnIndex] = new BitMap(distinctOutputs.size());
      for (int rowIndex = 0; rowIndex < distinctOutputs.size(); ++rowIndex) {
        final Map<String, Pair<TSDataType, Object>> aggregatedResults =
            distinctOutputs.get(rowIndex).getAggregatedResults();
        if (aggregatedResults.containsKey(columnNameStringList[columnIndex])) {
          if (Objects.isNull(valueColumnTypes[columnIndex])) {
            // Fill in measurements and init columns when the first non-null value is seen
            valueColumnTypes[columnIndex] =
                aggregatedResults.get(columnNameStringList[columnIndex]).getLeft();
            measurementSchemaList[columnIndex] =
                new MeasurementSchema(
                    columnNameStringList[columnIndex], valueColumnTypes[columnIndex]);
            switch (valueColumnTypes[columnIndex]) {
              case BOOLEAN:
                valueColumns[columnIndex] = new boolean[distinctOutputs.size()];
                break;
              case INT32:
                valueColumns[columnIndex] = new int[distinctOutputs.size()];
                break;
              case DATE:
                valueColumns[columnIndex] = new LocalDate[distinctOutputs.size()];
                break;
              case INT64:
              case TIMESTAMP:
                valueColumns[columnIndex] = new long[distinctOutputs.size()];
                break;
              case FLOAT:
                valueColumns[columnIndex] = new float[distinctOutputs.size()];
                break;
              case DOUBLE:
                valueColumns[columnIndex] = new double[distinctOutputs.size()];
                break;
              case TEXT:
              case BLOB:
              case STRING:
                valueColumns[columnIndex] = new Binary[distinctOutputs.size()];
                break;
              default:
                throw new UnsupportedOperationException(
                    String.format(
                        "The output tablet does not support column type %s",
                        valueColumnTypes[columnIndex]));
            }
          }
          // Fill in values
          switch (valueColumnTypes[columnIndex]) {
            case BOOLEAN:
              ((boolean[]) valueColumns[columnIndex])[rowIndex] =
                  (boolean) aggregatedResults.get(columnNameStringList[columnIndex]).getRight();
              break;
            case INT32:
              ((int[]) valueColumns[columnIndex])[rowIndex] =
                  (int) aggregatedResults.get(columnNameStringList[columnIndex]).getRight();
              break;
            case DATE:
              ((LocalDate[]) valueColumns[columnIndex])[rowIndex] =
                  (LocalDate) aggregatedResults.get(columnNameStringList[columnIndex]).getRight();
              break;
            case INT64:
            case TIMESTAMP:
              ((long[]) valueColumns[columnIndex])[rowIndex] =
                  (long) aggregatedResults.get(columnNameStringList[columnIndex]).getRight();
              break;
            case FLOAT:
              ((float[]) valueColumns[columnIndex])[rowIndex] =
                  (float) aggregatedResults.get(columnNameStringList[columnIndex]).getRight();
              break;
            case DOUBLE:
              ((double[]) valueColumns[columnIndex])[rowIndex] =
                  (double) aggregatedResults.get(columnNameStringList[columnIndex]).getRight();
              break;
            case TEXT:
            case STRING:
              ((Binary[]) valueColumns[columnIndex])[rowIndex] =
                  aggregatedResults.get(columnNameStringList[columnIndex]).getRight()
                          instanceof Binary
                      ? (Binary) aggregatedResults.get(columnNameStringList[columnIndex]).getRight()
                      : new Binary(
                          (String)
                              aggregatedResults.get(columnNameStringList[columnIndex]).getRight(),
                          TSFileConfig.STRING_CHARSET);
              break;
            case BLOB:
              ((Binary[]) valueColumns[columnIndex])[rowIndex] =
                  (Binary) aggregatedResults.get(columnNameStringList[columnIndex]).getRight();
              break;
            default:
              // FIX: was valueColumnTypes[rowIndex], which reads the wrong slot and can
              // itself throw ArrayIndexOutOfBoundsException while reporting the error
              throw new UnsupportedOperationException(
                  String.format(
                      "The output tablet does not support column type %s",
                      valueColumnTypes[columnIndex]));
          }
        } else {
          bitMaps[columnIndex].mark(rowIndex);
        }
      }
    }
    // Filter null outputs
    final Integer[] originColumnIndex2FilteredColumnIndexMapperList =
        new Integer[columnNameStringList.length];
    int filteredCount = 0;
    for (int i = 0; i < columnNameStringList.length; ++i) {
      if (!bitMaps[i].isAllMarked()) {
        // FIX: assign the 0-based filtered index (post-increment). The previous pre-increment
        // stored 1-based indices which were later used directly against arrays of size
        // filteredCount, overflowing on the last surviving column.
        originColumnIndex2FilteredColumnIndexMapperList[i] = filteredCount++;
      }
    }
    final String outputTimeSeries =
        outputDatabaseWithPathSeparator.isEmpty()
            ? timeSeries
            : outputDatabaseWithPathSeparator + timeSeries;
    if (filteredCount == columnNameStringList.length) {
      // No filter, collect rows
      for (int rowIndex = 0; rowIndex < distinctOutputs.size(); ++rowIndex) {
        collector.collectRow(
            rowIndex == 0
                ? new PipeResetTabletRow(
                    rowIndex,
                    outputTimeSeries,
                    false,
                    measurementSchemaList,
                    timestampColumn,
                    valueColumnTypes,
                    valueColumns,
                    bitMaps,
                    columnNameStringList)
                : new PipeRow(
                    rowIndex,
                    outputTimeSeries,
                    false,
                    measurementSchemaList,
                    timestampColumn,
                    valueColumnTypes,
                    valueColumns,
                    bitMaps,
                    columnNameStringList));
      }
    } else {
      // Recompute the column arrays
      final MeasurementSchema[] filteredMeasurementSchemaList =
          new MeasurementSchema[filteredCount];
      final String[] filteredColumnNameStringList = new String[filteredCount];
      final TSDataType[] filteredValueColumnTypes = new TSDataType[filteredCount];
      final Object[] filteredValueColumns = new Object[filteredCount];
      final BitMap[] filteredBitMaps = new BitMap[filteredCount];
      for (int i = 0; i < originColumnIndex2FilteredColumnIndexMapperList.length; i++) {
        if (originColumnIndex2FilteredColumnIndexMapperList[i] != null) {
          final int filteredColumnIndex = originColumnIndex2FilteredColumnIndexMapperList[i];
          filteredMeasurementSchemaList[filteredColumnIndex] = measurementSchemaList[i];
          filteredColumnNameStringList[filteredColumnIndex] = columnNameStringList[i];
          filteredValueColumnTypes[filteredColumnIndex] = valueColumnTypes[i];
          filteredBitMaps[filteredColumnIndex] = bitMaps[i];
          filteredValueColumns[filteredColumnIndex] = valueColumns[i];
        }
      }
      // Collect rows
      for (int rowIndex = 0; rowIndex < distinctOutputs.size(); ++rowIndex) {
        collector.collectRow(
            rowIndex == 0
                ? new PipeResetTabletRow(
                    rowIndex,
                    outputTimeSeries,
                    false,
                    filteredMeasurementSchemaList,
                    timestampColumn,
                    filteredValueColumnTypes,
                    filteredValueColumns,
                    filteredBitMaps,
                    filteredColumnNameStringList)
                : new PipeRow(
                    rowIndex,
                    outputTimeSeries,
                    false,
                    filteredMeasurementSchemaList,
                    timestampColumn,
                    filteredValueColumnTypes,
                    filteredValueColumns,
                    filteredBitMaps,
                    filteredColumnNameStringList));
      }
    }
  }

  /**
   * Decrements this pipe's reference count and tears down the shared per-pipe state when the last
   * instance closes. Also closes the windowing processor and all operator processors.
   */
  @Override
  public void close() throws Exception {
    if (Objects.nonNull(pipeName)
        && pipeName2referenceCountMap.compute(
                pipeName, (name, count) -> Objects.nonNull(count) ? count - 1 : 0)
            == 0) {
      pipeName2timeSeries2TimeSeriesRuntimeStateMap.get(pipeName).clear();
      pipeName2timeSeries2TimeSeriesRuntimeStateMap.remove(pipeName);
      pipeName2LastValueReceiveTimeMap.remove(pipeName);
    }
    if (Objects.nonNull(windowingProcessor)) {
      windowingProcessor.close();
    }
    for (final PipeProcessor operatorProcessor : operatorProcessors) {
      operatorProcessor.close();
    }
  }
}
|
googleapis/google-api-java-client-services | 38,307 | clients/google-api-services-slides/v1/1.30.1/com/google/api/services/slides/v1/Slides.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.slides.v1;
/**
* Service definition for Slides (v1).
*
* <p>
* Reads and writes Google Slides presentations.
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://developers.google.com/slides/" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link SlidesRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class Slides extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
// Note: Leave this static initializer at the top of the file.
static {
com.google.api.client.util.Preconditions.checkState(
com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15,
"You are currently running with version %s of google-api-client. " +
"You need at least version 1.15 of google-api-client to run version " +
"1.30.10 of the Google Slides API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
}
/**
* The default encoded root URL of the service. This is determined when the library is generated
* and normally should not be changed.
*
* @since 1.7
*/
public static final String DEFAULT_ROOT_URL = "https://slides.googleapis.com/";
/**
* The default encoded service path of the service. This is determined when the library is
* generated and normally should not be changed.
*
* @since 1.7
*/
public static final String DEFAULT_SERVICE_PATH = "";
/**
* The default encoded batch path of the service. This is determined when the library is
* generated and normally should not be changed.
*
* @since 1.23
*/
public static final String DEFAULT_BATCH_PATH = "batch";
/**
* The default encoded base URL of the service. This is determined when the library is generated
* and normally should not be changed.
*/
public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
/**
* Constructor.
*
* <p>
* Use {@link Builder} if you need to specify any of the optional parameters.
* </p>
*
* @param transport HTTP transport, which should normally be:
* <ul>
* <li>Google App Engine:
* {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
* <li>Android: {@code newCompatibleTransport} from
* {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
* <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
* </li>
* </ul>
* @param jsonFactory JSON factory, which may be:
* <ul>
* <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
* <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
* <li>Android Honeycomb or higher:
* {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
* </ul>
* @param httpRequestInitializer HTTP request initializer or {@code null} for none
* @since 1.7
*/
  public Slides(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
      com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
    // Delegate to the Builder-based constructor so both construction paths
    // share a single initialization code path.
    this(new Builder(transport, jsonFactory, httpRequestInitializer));
  }
/**
* @param builder builder
*/
  Slides(Builder builder) {
    // Package-private: invoked by Builder.build(); all configuration
    // (root URL, service path, initializers) comes from the builder.
    super(builder);
  }
  @Override
  protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
    // Per-request initialization hook; currently defers entirely to the
    // superclass (which applies the configured request initializer).
    super.initialize(httpClientRequest);
  }
/**
* An accessor for creating requests from the Presentations collection.
*
* <p>The typical use is:</p>
* <pre>
* {@code Slides slides = new Slides(...);}
* {@code Slides.Presentations.List request = slides.presentations().list(parameters ...)}
* </pre>
*
* @return the resource collection
*/
  public Presentations presentations() {
    // Each call returns a fresh accessor; Presentations itself holds no state.
    return new Presentations();
  }
/**
* The "presentations" collection of methods.
*/
public class Presentations {
/**
* Applies one or more updates to the presentation. Each request is validated before being applied.
* If any request is not valid, then the entire request will fail and nothing will be applied. Some
* requests have replies to give you some information about how they are applied. Other requests do
* not need to return information; these each return an empty reply. The order of replies matches
* that of the requests. For example, suppose you call batchUpdate with four updates, and only the
* third one returns information. The response would have two empty replies: the reply to the third
* request, and another empty reply, in that order. Because other users may be editing the
* presentation, the presentation might not exactly reflect your changes: your changes may be
* altered with respect to collaborator changes. If there are no collaborators, the presentation
* should reflect your changes. In any case, the updates in your request are guaranteed to be
* applied together atomically.
*
* Create a request for the method "presentations.batchUpdate".
*
* This request holds the parameters needed by the slides server. After setting any optional
* parameters, call the {@link BatchUpdate#execute()} method to invoke the remote operation.
*
* @param presentationId The presentation to apply the updates to.
* @param content the {@link com.google.api.services.slides.v1.model.BatchUpdatePresentationRequest}
* @return the request
*/
public BatchUpdate batchUpdate(java.lang.String presentationId, com.google.api.services.slides.v1.model.BatchUpdatePresentationRequest content) throws java.io.IOException {
BatchUpdate result = new BatchUpdate(presentationId, content);
initialize(result);
return result;
}
public class BatchUpdate extends SlidesRequest<com.google.api.services.slides.v1.model.BatchUpdatePresentationResponse> {
private static final String REST_PATH = "v1/presentations/{presentationId}:batchUpdate";
/**
* Applies one or more updates to the presentation. Each request is validated before being
* applied. If any request is not valid, then the entire request will fail and nothing will be
* applied. Some requests have replies to give you some information about how they are applied.
* Other requests do not need to return information; these each return an empty reply. The order
* of replies matches that of the requests. For example, suppose you call batchUpdate with four
* updates, and only the third one returns information. The response would have two empty replies:
* the reply to the third request, and another empty reply, in that order. Because other users may
* be editing the presentation, the presentation might not exactly reflect your changes: your
* changes may be altered with respect to collaborator changes. If there are no collaborators, the
* presentation should reflect your changes. In any case, the updates in your request are
* guaranteed to be applied together atomically.
*
* Create a request for the method "presentations.batchUpdate".
*
     * This request holds the parameters needed by the slides server. After setting any optional
* parameters, call the {@link BatchUpdate#execute()} method to invoke the remote operation. <p>
* {@link
* BatchUpdate#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
* must be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param presentationId The presentation to apply the updates to.
* @param content the {@link com.google.api.services.slides.v1.model.BatchUpdatePresentationRequest}
* @since 1.13
*/
protected BatchUpdate(java.lang.String presentationId, com.google.api.services.slides.v1.model.BatchUpdatePresentationRequest content) {
super(Slides.this, "POST", REST_PATH, content, com.google.api.services.slides.v1.model.BatchUpdatePresentationResponse.class);
this.presentationId = com.google.api.client.util.Preconditions.checkNotNull(presentationId, "Required parameter presentationId must be specified.");
}
@Override
public BatchUpdate set$Xgafv(java.lang.String $Xgafv) {
return (BatchUpdate) super.set$Xgafv($Xgafv);
}
@Override
public BatchUpdate setAccessToken(java.lang.String accessToken) {
return (BatchUpdate) super.setAccessToken(accessToken);
}
@Override
public BatchUpdate setAlt(java.lang.String alt) {
return (BatchUpdate) super.setAlt(alt);
}
@Override
public BatchUpdate setCallback(java.lang.String callback) {
return (BatchUpdate) super.setCallback(callback);
}
@Override
public BatchUpdate setFields(java.lang.String fields) {
return (BatchUpdate) super.setFields(fields);
}
@Override
public BatchUpdate setKey(java.lang.String key) {
return (BatchUpdate) super.setKey(key);
}
@Override
public BatchUpdate setOauthToken(java.lang.String oauthToken) {
return (BatchUpdate) super.setOauthToken(oauthToken);
}
@Override
public BatchUpdate setPrettyPrint(java.lang.Boolean prettyPrint) {
return (BatchUpdate) super.setPrettyPrint(prettyPrint);
}
@Override
public BatchUpdate setQuotaUser(java.lang.String quotaUser) {
return (BatchUpdate) super.setQuotaUser(quotaUser);
}
@Override
public BatchUpdate setUploadType(java.lang.String uploadType) {
return (BatchUpdate) super.setUploadType(uploadType);
}
@Override
public BatchUpdate setUploadProtocol(java.lang.String uploadProtocol) {
return (BatchUpdate) super.setUploadProtocol(uploadProtocol);
}
/** The presentation to apply the updates to. */
@com.google.api.client.util.Key
private java.lang.String presentationId;
/** The presentation to apply the updates to.
*/
public java.lang.String getPresentationId() {
return presentationId;
}
/** The presentation to apply the updates to. */
public BatchUpdate setPresentationId(java.lang.String presentationId) {
this.presentationId = presentationId;
return this;
}
@Override
public BatchUpdate set(String parameterName, Object value) {
return (BatchUpdate) super.set(parameterName, value);
}
}
/**
* Creates a blank presentation using the title given in the request. If a `presentationId` is
* provided, it is used as the ID of the new presentation. Otherwise, a new ID is generated. Other
* fields in the request, including any provided content, are ignored. Returns the created
* presentation.
*
* Create a request for the method "presentations.create".
*
* This request holds the parameters needed by the slides server. After setting any optional
* parameters, call the {@link Create#execute()} method to invoke the remote operation.
*
* @param content the {@link com.google.api.services.slides.v1.model.Presentation}
* @return the request
*/
public Create create(com.google.api.services.slides.v1.model.Presentation content) throws java.io.IOException {
Create result = new Create(content);
initialize(result);
return result;
}
public class Create extends SlidesRequest<com.google.api.services.slides.v1.model.Presentation> {
private static final String REST_PATH = "v1/presentations";
/**
* Creates a blank presentation using the title given in the request. If a `presentationId` is
* provided, it is used as the ID of the new presentation. Otherwise, a new ID is generated. Other
* fields in the request, including any provided content, are ignored. Returns the created
* presentation.
*
* Create a request for the method "presentations.create".
*
     * This request holds the parameters needed by the slides server. After setting any optional
* parameters, call the {@link Create#execute()} method to invoke the remote operation. <p> {@link
* Create#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
* be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param content the {@link com.google.api.services.slides.v1.model.Presentation}
* @since 1.13
*/
protected Create(com.google.api.services.slides.v1.model.Presentation content) {
super(Slides.this, "POST", REST_PATH, content, com.google.api.services.slides.v1.model.Presentation.class);
}
@Override
public Create set$Xgafv(java.lang.String $Xgafv) {
return (Create) super.set$Xgafv($Xgafv);
}
@Override
public Create setAccessToken(java.lang.String accessToken) {
return (Create) super.setAccessToken(accessToken);
}
@Override
public Create setAlt(java.lang.String alt) {
return (Create) super.setAlt(alt);
}
@Override
public Create setCallback(java.lang.String callback) {
return (Create) super.setCallback(callback);
}
@Override
public Create setFields(java.lang.String fields) {
return (Create) super.setFields(fields);
}
@Override
public Create setKey(java.lang.String key) {
return (Create) super.setKey(key);
}
@Override
public Create setOauthToken(java.lang.String oauthToken) {
return (Create) super.setOauthToken(oauthToken);
}
@Override
public Create setPrettyPrint(java.lang.Boolean prettyPrint) {
return (Create) super.setPrettyPrint(prettyPrint);
}
@Override
public Create setQuotaUser(java.lang.String quotaUser) {
return (Create) super.setQuotaUser(quotaUser);
}
@Override
public Create setUploadType(java.lang.String uploadType) {
return (Create) super.setUploadType(uploadType);
}
@Override
public Create setUploadProtocol(java.lang.String uploadProtocol) {
return (Create) super.setUploadProtocol(uploadProtocol);
}
@Override
public Create set(String parameterName, Object value) {
return (Create) super.set(parameterName, value);
}
}
/**
* Gets the latest version of the specified presentation.
*
* Create a request for the method "presentations.get".
*
* This request holds the parameters needed by the slides server. After setting any optional
* parameters, call the {@link Get#execute()} method to invoke the remote operation.
*
* @param presentationId The ID of the presentation to retrieve.
* @return the request
*/
public Get get(java.lang.String presentationId) throws java.io.IOException {
Get result = new Get(presentationId);
initialize(result);
return result;
}
public class Get extends SlidesRequest<com.google.api.services.slides.v1.model.Presentation> {
private static final String REST_PATH = "v1/presentations/{+presentationId}";
private final java.util.regex.Pattern PRESENTATION_ID_PATTERN =
java.util.regex.Pattern.compile("^[^/]+$");
/**
* Gets the latest version of the specified presentation.
*
* Create a request for the method "presentations.get".
*
     * This request holds the parameters needed by the slides server. After setting any optional
* parameters, call the {@link Get#execute()} method to invoke the remote operation. <p> {@link
* Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be
* called to initialize this instance immediately after invoking the constructor. </p>
*
* @param presentationId The ID of the presentation to retrieve.
* @since 1.13
*/
protected Get(java.lang.String presentationId) {
super(Slides.this, "GET", REST_PATH, null, com.google.api.services.slides.v1.model.Presentation.class);
this.presentationId = com.google.api.client.util.Preconditions.checkNotNull(presentationId, "Required parameter presentationId must be specified.");
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(PRESENTATION_ID_PATTERN.matcher(presentationId).matches(),
"Parameter presentationId must conform to the pattern " +
"^[^/]+$");
}
}
@Override
public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
return super.executeUsingHead();
}
@Override
public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
return super.buildHttpRequestUsingHead();
}
@Override
public Get set$Xgafv(java.lang.String $Xgafv) {
return (Get) super.set$Xgafv($Xgafv);
}
@Override
public Get setAccessToken(java.lang.String accessToken) {
return (Get) super.setAccessToken(accessToken);
}
@Override
public Get setAlt(java.lang.String alt) {
return (Get) super.setAlt(alt);
}
@Override
public Get setCallback(java.lang.String callback) {
return (Get) super.setCallback(callback);
}
@Override
public Get setFields(java.lang.String fields) {
return (Get) super.setFields(fields);
}
@Override
public Get setKey(java.lang.String key) {
return (Get) super.setKey(key);
}
@Override
public Get setOauthToken(java.lang.String oauthToken) {
return (Get) super.setOauthToken(oauthToken);
}
@Override
public Get setPrettyPrint(java.lang.Boolean prettyPrint) {
return (Get) super.setPrettyPrint(prettyPrint);
}
@Override
public Get setQuotaUser(java.lang.String quotaUser) {
return (Get) super.setQuotaUser(quotaUser);
}
@Override
public Get setUploadType(java.lang.String uploadType) {
return (Get) super.setUploadType(uploadType);
}
@Override
public Get setUploadProtocol(java.lang.String uploadProtocol) {
return (Get) super.setUploadProtocol(uploadProtocol);
}
/** The ID of the presentation to retrieve. */
@com.google.api.client.util.Key
private java.lang.String presentationId;
/** The ID of the presentation to retrieve.
*/
public java.lang.String getPresentationId() {
return presentationId;
}
/** The ID of the presentation to retrieve. */
public Get setPresentationId(java.lang.String presentationId) {
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(PRESENTATION_ID_PATTERN.matcher(presentationId).matches(),
"Parameter presentationId must conform to the pattern " +
"^[^/]+$");
}
this.presentationId = presentationId;
return this;
}
@Override
public Get set(String parameterName, Object value) {
return (Get) super.set(parameterName, value);
}
}
/**
* An accessor for creating requests from the Pages collection.
*
* <p>The typical use is:</p>
* <pre>
* {@code Slides slides = new Slides(...);}
* {@code Slides.Pages.List request = slides.pages().list(parameters ...)}
* </pre>
*
* @return the resource collection
*/
    public Pages pages() {
      // Each call returns a fresh accessor; Pages itself holds no state.
      return new Pages();
    }
/**
* The "pages" collection of methods.
*/
public class Pages {
/**
* Gets the latest version of the specified page in the presentation.
*
* Create a request for the method "pages.get".
*
* This request holds the parameters needed by the slides server. After setting any optional
* parameters, call the {@link Get#execute()} method to invoke the remote operation.
*
* @param presentationId The ID of the presentation to retrieve.
* @param pageObjectId The object ID of the page to retrieve.
* @return the request
*/
public Get get(java.lang.String presentationId, java.lang.String pageObjectId) throws java.io.IOException {
Get result = new Get(presentationId, pageObjectId);
initialize(result);
return result;
}
public class Get extends SlidesRequest<com.google.api.services.slides.v1.model.Page> {
private static final String REST_PATH = "v1/presentations/{presentationId}/pages/{pageObjectId}";
/**
* Gets the latest version of the specified page in the presentation.
*
* Create a request for the method "pages.get".
*
       * This request holds the parameters needed by the slides server. After setting any optional
* parameters, call the {@link Get#execute()} method to invoke the remote operation. <p> {@link
* Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be
* called to initialize this instance immediately after invoking the constructor. </p>
*
* @param presentationId The ID of the presentation to retrieve.
* @param pageObjectId The object ID of the page to retrieve.
* @since 1.13
*/
protected Get(java.lang.String presentationId, java.lang.String pageObjectId) {
super(Slides.this, "GET", REST_PATH, null, com.google.api.services.slides.v1.model.Page.class);
this.presentationId = com.google.api.client.util.Preconditions.checkNotNull(presentationId, "Required parameter presentationId must be specified.");
this.pageObjectId = com.google.api.client.util.Preconditions.checkNotNull(pageObjectId, "Required parameter pageObjectId must be specified.");
}
@Override
public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
return super.executeUsingHead();
}
@Override
public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
return super.buildHttpRequestUsingHead();
}
@Override
public Get set$Xgafv(java.lang.String $Xgafv) {
return (Get) super.set$Xgafv($Xgafv);
}
@Override
public Get setAccessToken(java.lang.String accessToken) {
return (Get) super.setAccessToken(accessToken);
}
@Override
public Get setAlt(java.lang.String alt) {
return (Get) super.setAlt(alt);
}
@Override
public Get setCallback(java.lang.String callback) {
return (Get) super.setCallback(callback);
}
@Override
public Get setFields(java.lang.String fields) {
return (Get) super.setFields(fields);
}
@Override
public Get setKey(java.lang.String key) {
return (Get) super.setKey(key);
}
@Override
public Get setOauthToken(java.lang.String oauthToken) {
return (Get) super.setOauthToken(oauthToken);
}
@Override
public Get setPrettyPrint(java.lang.Boolean prettyPrint) {
return (Get) super.setPrettyPrint(prettyPrint);
}
@Override
public Get setQuotaUser(java.lang.String quotaUser) {
return (Get) super.setQuotaUser(quotaUser);
}
@Override
public Get setUploadType(java.lang.String uploadType) {
return (Get) super.setUploadType(uploadType);
}
@Override
public Get setUploadProtocol(java.lang.String uploadProtocol) {
return (Get) super.setUploadProtocol(uploadProtocol);
}
/** The ID of the presentation to retrieve. */
@com.google.api.client.util.Key
private java.lang.String presentationId;
/** The ID of the presentation to retrieve.
*/
public java.lang.String getPresentationId() {
return presentationId;
}
/** The ID of the presentation to retrieve. */
public Get setPresentationId(java.lang.String presentationId) {
this.presentationId = presentationId;
return this;
}
/** The object ID of the page to retrieve. */
@com.google.api.client.util.Key
private java.lang.String pageObjectId;
/** The object ID of the page to retrieve.
*/
public java.lang.String getPageObjectId() {
return pageObjectId;
}
/** The object ID of the page to retrieve. */
public Get setPageObjectId(java.lang.String pageObjectId) {
this.pageObjectId = pageObjectId;
return this;
}
@Override
public Get set(String parameterName, Object value) {
return (Get) super.set(parameterName, value);
}
}
/**
* Generates a thumbnail of the latest version of the specified page in the presentation and returns
* a URL to the thumbnail image. This request counts as an [expensive read request](/slides/limits)
* for quota purposes.
*
* Create a request for the method "pages.getThumbnail".
*
* This request holds the parameters needed by the slides server. After setting any optional
* parameters, call the {@link GetThumbnail#execute()} method to invoke the remote operation.
*
* @param presentationId The ID of the presentation to retrieve.
* @param pageObjectId The object ID of the page whose thumbnail to retrieve.
* @return the request
*/
public GetThumbnail getThumbnail(java.lang.String presentationId, java.lang.String pageObjectId) throws java.io.IOException {
GetThumbnail result = new GetThumbnail(presentationId, pageObjectId);
initialize(result);
return result;
}
public class GetThumbnail extends SlidesRequest<com.google.api.services.slides.v1.model.Thumbnail> {
private static final String REST_PATH = "v1/presentations/{presentationId}/pages/{pageObjectId}/thumbnail";
/**
* Generates a thumbnail of the latest version of the specified page in the presentation and
* returns a URL to the thumbnail image. This request counts as an [expensive read
* request](/slides/limits) for quota purposes.
*
* Create a request for the method "pages.getThumbnail".
*
       * This request holds the parameters needed by the slides server. After setting any optional
* parameters, call the {@link GetThumbnail#execute()} method to invoke the remote operation. <p>
* {@link
* GetThumbnail#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
* must be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param presentationId The ID of the presentation to retrieve.
* @param pageObjectId The object ID of the page whose thumbnail to retrieve.
* @since 1.13
*/
protected GetThumbnail(java.lang.String presentationId, java.lang.String pageObjectId) {
super(Slides.this, "GET", REST_PATH, null, com.google.api.services.slides.v1.model.Thumbnail.class);
this.presentationId = com.google.api.client.util.Preconditions.checkNotNull(presentationId, "Required parameter presentationId must be specified.");
this.pageObjectId = com.google.api.client.util.Preconditions.checkNotNull(pageObjectId, "Required parameter pageObjectId must be specified.");
}
@Override
public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
return super.executeUsingHead();
}
@Override
public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
return super.buildHttpRequestUsingHead();
}
@Override
public GetThumbnail set$Xgafv(java.lang.String $Xgafv) {
return (GetThumbnail) super.set$Xgafv($Xgafv);
}
@Override
public GetThumbnail setAccessToken(java.lang.String accessToken) {
return (GetThumbnail) super.setAccessToken(accessToken);
}
@Override
public GetThumbnail setAlt(java.lang.String alt) {
return (GetThumbnail) super.setAlt(alt);
}
@Override
public GetThumbnail setCallback(java.lang.String callback) {
return (GetThumbnail) super.setCallback(callback);
}
@Override
public GetThumbnail setFields(java.lang.String fields) {
return (GetThumbnail) super.setFields(fields);
}
@Override
public GetThumbnail setKey(java.lang.String key) {
return (GetThumbnail) super.setKey(key);
}
@Override
public GetThumbnail setOauthToken(java.lang.String oauthToken) {
return (GetThumbnail) super.setOauthToken(oauthToken);
}
@Override
public GetThumbnail setPrettyPrint(java.lang.Boolean prettyPrint) {
return (GetThumbnail) super.setPrettyPrint(prettyPrint);
}
@Override
public GetThumbnail setQuotaUser(java.lang.String quotaUser) {
return (GetThumbnail) super.setQuotaUser(quotaUser);
}
@Override
public GetThumbnail setUploadType(java.lang.String uploadType) {
return (GetThumbnail) super.setUploadType(uploadType);
}
@Override
public GetThumbnail setUploadProtocol(java.lang.String uploadProtocol) {
return (GetThumbnail) super.setUploadProtocol(uploadProtocol);
}
/** The ID of the presentation to retrieve. */
@com.google.api.client.util.Key
private java.lang.String presentationId;
/** The ID of the presentation to retrieve.
*/
public java.lang.String getPresentationId() {
return presentationId;
}
/** The ID of the presentation to retrieve. */
public GetThumbnail setPresentationId(java.lang.String presentationId) {
this.presentationId = presentationId;
return this;
}
/** The object ID of the page whose thumbnail to retrieve. */
@com.google.api.client.util.Key
private java.lang.String pageObjectId;
/** The object ID of the page whose thumbnail to retrieve.
*/
public java.lang.String getPageObjectId() {
return pageObjectId;
}
/** The object ID of the page whose thumbnail to retrieve. */
public GetThumbnail setPageObjectId(java.lang.String pageObjectId) {
this.pageObjectId = pageObjectId;
return this;
}
/**
* The optional mime type of the thumbnail image. If you don't specify the mime type, the
* mime type defaults to PNG.
*/
@com.google.api.client.util.Key("thumbnailProperties.mimeType")
private java.lang.String thumbnailPropertiesMimeType;
/** The optional mime type of the thumbnail image. If you don't specify the mime type, the mime type
defaults to PNG.
*/
public java.lang.String getThumbnailPropertiesMimeType() {
return thumbnailPropertiesMimeType;
}
/**
* The optional mime type of the thumbnail image. If you don't specify the mime type, the
* mime type defaults to PNG.
*/
public GetThumbnail setThumbnailPropertiesMimeType(java.lang.String thumbnailPropertiesMimeType) {
this.thumbnailPropertiesMimeType = thumbnailPropertiesMimeType;
return this;
}
/**
* The optional thumbnail image size. If you don't specify the size, the server chooses a
* default size of the image.
*/
@com.google.api.client.util.Key("thumbnailProperties.thumbnailSize")
private java.lang.String thumbnailPropertiesThumbnailSize;
/** The optional thumbnail image size. If you don't specify the size, the server chooses a default size
of the image.
*/
public java.lang.String getThumbnailPropertiesThumbnailSize() {
return thumbnailPropertiesThumbnailSize;
}
/**
* The optional thumbnail image size. If you don't specify the size, the server chooses a
* default size of the image.
*/
public GetThumbnail setThumbnailPropertiesThumbnailSize(java.lang.String thumbnailPropertiesThumbnailSize) {
this.thumbnailPropertiesThumbnailSize = thumbnailPropertiesThumbnailSize;
return this;
}
@Override
public GetThumbnail set(String parameterName, Object value) {
return (GetThumbnail) super.set(parameterName, value);
}
}
}
}
/**
* Builder for {@link Slides}.
*
* <p>
* Implementation is not thread-safe.
* </p>
*
* @since 1.3.0
*/
  public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {
    /**
     * Returns an instance of a new builder.
     *
     * @param transport HTTP transport, which should normally be:
     * <ul>
     * <li>Google App Engine:
     * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
     * <li>Android: {@code newCompatibleTransport} from
     * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
     * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
     * </li>
     * </ul>
     * @param jsonFactory JSON factory, which may be:
     * <ul>
     * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
     * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
     * <li>Android Honeycomb or higher:
     * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
     * </ul>
     * @param httpRequestInitializer HTTP request initializer or {@code null} for none
     * @since 1.7
     */
    public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
        com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      // NOTE(review): the trailing boolean is the superclass's final ctor flag
      // (presumably "legacy data wrapper" = false) — confirm against
      // AbstractGoogleJsonClient.Builder before changing.
      super(
          transport,
          jsonFactory,
          DEFAULT_ROOT_URL,
          DEFAULT_SERVICE_PATH,
          httpRequestInitializer,
          false);
      // All requests of this client are batched against the service batch endpoint.
      setBatchPath(DEFAULT_BATCH_PATH);
    }

    /** Builds a new instance of {@link Slides}. */
    @Override
    public Slides build() {
      return new Slides(this);
    }

    // The overrides below only narrow the return type to Builder so that
    // calls can be chained fluently; each delegates to the superclass.
    @Override
    public Builder setRootUrl(String rootUrl) {
      return (Builder) super.setRootUrl(rootUrl);
    }

    @Override
    public Builder setServicePath(String servicePath) {
      return (Builder) super.setServicePath(servicePath);
    }

    @Override
    public Builder setBatchPath(String batchPath) {
      return (Builder) super.setBatchPath(batchPath);
    }

    @Override
    public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
      return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
    }

    @Override
    public Builder setApplicationName(String applicationName) {
      return (Builder) super.setApplicationName(applicationName);
    }

    @Override
    public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
      return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
    }

    @Override
    public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
      return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
    }

    @Override
    public Builder setSuppressAllChecks(boolean suppressAllChecks) {
      return (Builder) super.setSuppressAllChecks(suppressAllChecks);
    }

    /**
     * Set the {@link SlidesRequestInitializer}.
     *
     * @since 1.12
     */
    public Builder setSlidesRequestInitializer(
        SlidesRequestInitializer slidesRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(slidesRequestInitializer);
    }

    @Override
    public Builder setGoogleClientRequestInitializer(
        com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
      return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
    }
  }
}
|
google/closure-compiler | 37,269 | test/com/google/javascript/jscomp/lint/CheckJSDocStyleTest.java | /*
* Copyright 2015 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp.lint;
import static com.google.javascript.jscomp.lint.CheckJSDocStyle.CLASS_DISALLOWED_JSDOC;
import static com.google.javascript.jscomp.lint.CheckJSDocStyle.EXTERNS_FILES_SHOULD_BE_ANNOTATED;
import static com.google.javascript.jscomp.lint.CheckJSDocStyle.INCORRECT_ANNOTATION_ON_GETTER_SETTER;
import static com.google.javascript.jscomp.lint.CheckJSDocStyle.INCORRECT_PARAM_NAME;
import static com.google.javascript.jscomp.lint.CheckJSDocStyle.LICENSE_CONTAINS_AT_EXTERNS;
import static com.google.javascript.jscomp.lint.CheckJSDocStyle.MISSING_JSDOC;
import static com.google.javascript.jscomp.lint.CheckJSDocStyle.MISSING_PARAMETER_JSDOC;
import static com.google.javascript.jscomp.lint.CheckJSDocStyle.MISSING_RETURN_JSDOC;
import static com.google.javascript.jscomp.lint.CheckJSDocStyle.MIXED_PARAM_JSDOC_STYLES;
import static com.google.javascript.jscomp.lint.CheckJSDocStyle.OPTIONAL_PARAM_NOT_MARKED_OPTIONAL;
import static com.google.javascript.jscomp.lint.CheckJSDocStyle.PREFER_BACKTICKS_TO_AT_SIGN_CODE;
import static com.google.javascript.jscomp.lint.CheckJSDocStyle.WRONG_NUMBER_OF_PARAMS;
import com.google.javascript.jscomp.CheckLevel;
import com.google.javascript.jscomp.ClosureCodingConvention;
import com.google.javascript.jscomp.CodingConvention;
import com.google.javascript.jscomp.Compiler;
import com.google.javascript.jscomp.CompilerOptions;
import com.google.javascript.jscomp.CompilerPass;
import com.google.javascript.jscomp.CompilerTestCase;
import com.google.javascript.jscomp.GoogleCodingConvention;
import com.google.javascript.jscomp.parsing.Config;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Test case for {@link CheckJSDocStyle}. */
@RunWith(JUnit4.class)
public final class CheckJSDocStyleTest extends CompilerTestCase {
  public CheckJSDocStyleTest() {
    // Default externs for every test: a minimal file already annotated with @externs, so
    // the implicit externs input does not itself trip EXTERNS_FILES_SHOULD_BE_ANNOTATED.
    super("/** @fileoverview\n * @externs\n */");
  }
private CodingConvention codingConvention;
  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();
    // Tests run under the Google convention by default; individual tests may replace
    // this (see testMissingPrivate_noWarningWithClosureConvention).
    codingConvention = new GoogleCodingConvention();
  }
  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    // The lint pass under test.
    return new CheckJSDocStyle(compiler);
  }
  @Override
  protected CompilerOptions getOptions() {
    CompilerOptions options = super.getOptions();
    // Keep JSDoc descriptions during parsing so description-dependent checks can fire.
    options.setParseJsDocDocumentation(Config.JsDocParsing.INCLUDE_DESCRIPTIONS_NO_WHITESPACE);
    // Surface every CheckJSDocStyle diagnostic as a warning for these tests.
    options.setWarningLevel(CheckJSDocStyle.ALL_DIAGNOSTICS, CheckLevel.WARNING);
    return options;
  }
  @Override
  protected CodingConvention getCodingConvention() {
    // Returns whichever convention the current test installed in setUp (or locally).
    return codingConvention;
  }
@Test
public void testValidSuppress_onDeclaration() {
testSame("/** @const */ var global = this;");
testSame("/** @const */ goog.global = this;");
}
@Test
public void testValidSuppress_withES6Modules01() {
testSame("export /** @suppress {missingRequire} */ var x = new y.Z();");
}
@Test
public void testValidSuppress_withES6Modules03() {
testSame("export /** @const @suppress {duplicate} */ var google = {};");
}
@Test
public void testExtraneousClassAnnotations() {
testWarning(
"""
/**
* @constructor
*/
var X = class {};
""",
CLASS_DISALLOWED_JSDOC);
testWarning(
"""
/**
* @constructor
*/
class X {};
""",
CLASS_DISALLOWED_JSDOC);
// TODO(tbreisacher): Warn for @extends too. We need to distinguish between cases like this
// which are totally redundant...
testSame(
"""
/**
* @extends {Y}
*/
class X extends Y {};
""");
// ... and ones like this which are not.
testSame(
"""
/**
* @extends {Y<number>}
*/
class X extends Y {};
""");
testSame(
"""
/**
* @implements {Z}
*/
class X extends Y {};
""");
testSame(
"""
/**
* @interface
* @extends {Y}
*/
class X extends Y {};
""");
testSame(
"""
/**
* @record
* @extends {Y}
*/
class X extends Y {};
""");
}
@Test
public void testInvalidExtraneousClassAnnotations_withES6Modules() {
testWarning(
"""
export
/**
* @constructor
*/
var X = class {};
""",
CLASS_DISALLOWED_JSDOC);
}
@Test
public void testValidExtraneousClassAnnotations_withES6Modules() {
testSame("export /** @extends {Y} */ class X extends Y {};");
}
@Test
public void testNestedArrowFunctions() {
testSame(
"""
/**
* @param {Object} a
* @return {function(Object): boolean}
*/
var haskellStyleEquals = a => b => a == b;
""");
}
@Test
public void testNestedArrowFunctions_withES6Modules() {
testSame(
"""
export
/**
* @param {Object} a
* @return {function(Object): boolean}
*/
var haskellStyleEquals = a => b => a == b;
""");
}
@Test
public void testGetterSetterMissingJsDoc() {
testWarning("class Foo { get twentyone() { return 21; } }", MISSING_JSDOC);
testWarning("class Foo { set someString(s) { this.someString_ = s; } }", MISSING_JSDOC);
testSame("class Foo { /** @return {number} */ get twentyone() { return 21; } }");
testSame("class Foo { /** @param {string} s */ set someString(s) { this.someString_ = s; } }");
}
@Test
public void testTypeAnnotationOnGetterSetter() {
testWarning(
"class Foo { /** @type {number} */ get twentyone() { return 21; } }",
INCORRECT_ANNOTATION_ON_GETTER_SETTER);
testWarning(
"class Foo { /** @type {string} s */ set someString(s) { this.someString_ = s; } }",
INCORRECT_ANNOTATION_ON_GETTER_SETTER);
testNoWarning("class Foo { set someString( /** string */ s) { this.someString_ = s; } }");
}
@Test
public void testGetterSetter_withES6Modules() {
testSame("export class Foo { /** @return {number} */ get twentyone() { return 21; } }");
}
@Test
public void testMissingJsDoc() {
testWarning("function f() {}", MISSING_JSDOC);
testWarning("var f = function() {}", MISSING_JSDOC);
testWarning("let f = function() {}", MISSING_JSDOC);
testWarning("const f = function() {}", MISSING_JSDOC);
testWarning("foo.bar = function() {}", MISSING_JSDOC);
testWarning("Foo.prototype.bar = function() {}", MISSING_JSDOC);
testWarning("class Foo { bar() {} }", MISSING_JSDOC);
testWarning("class Foo { constructor(x) {} }", MISSING_JSDOC);
testWarning("var Foo = class { bar() {} };", MISSING_JSDOC);
testWarning("if (COMPILED) { var f = function() {}; }", MISSING_JSDOC);
testWarning("var f = async function() {};", MISSING_JSDOC);
testWarning("async function f() {};", MISSING_JSDOC);
testWarning("Polymer({ method() {} });", MISSING_JSDOC);
testWarning("Polymer({ method: function() {} });", MISSING_JSDOC);
testSame("/** @return {string} */ function f() {}");
testSame("/** @return {string} */ var f = function() {}");
testSame("/** @return {string} */ let f = function() {}");
testSame("/** @return {string} */ const f = function() {}");
testSame("/** @return {string} */ foo.bar = function() {}");
testSame("/** @return {string} */ Foo.prototype.bar = function() {}");
testSame("class Foo { /** @return {string} */ bar() {} }");
testSame("class Foo { constructor(/** string */ x) {} }");
testSame("var Foo = class { /** @return {string} */ bar() {} };");
testSame("/** @param {string} s */ var f = async function(s) {};");
testSame("/** @param {string} s */ async function f(s) {};");
testSame("Polymer({ /** @return {null} */ method() {} });");
testSame("Polymer({ /** @return {null} */ method: function() {} });");
}
@Test
public void testMissingJsDoc_withES6Modules01() {
testWarning("export function f() {}", MISSING_JSDOC);
}
@Test
public void testMissingJsDoc_withES6Modules02() {
testWarning("export var f = function() {}", MISSING_JSDOC);
}
@Test
public void testMissingJsDoc_withES6Modules03() {
testWarning("export let f = function() {}", MISSING_JSDOC);
}
@Test
public void testMissingJsDoc_withES6Modules04() {
testWarning("export const f = function() {}", MISSING_JSDOC);
}
@Test
public void testMissingJsDoc_withES6Modules09() {
testWarning("export var f = async function() {};", MISSING_JSDOC);
}
@Test
public void testMissingJsDoc_noWarningIfInlineJsDocIsPresent() {
testSame("function /** string */ f() {}");
testSame("function f(/** string */ x) {}");
testSame("var f = function(/** string */ x) {}");
testSame("let f = function(/** string */ x) {}");
testSame("const f = function(/** string */ x) {}");
testSame("foo.bar = function(/** string */ x) {}");
testSame("Foo.prototype.bar = function(/** string */ x) {}");
testSame("class Foo { bar(/** string */ x) {} }");
testSame("var Foo = class { bar(/** string */ x) {} };");
}
@Test
public void testMissingJsDoc_noWarningIfInlineJsDocIsPresent_withES6Modules() {
testSame("export function /** string */ f() {}");
}
@Test
public void testMissingJsDoc_noWarningIfNotTopLevel() {
testSame(inIIFE("function f() {}"));
testSame(inIIFE("var f = function() {}"));
testSame(inIIFE("let f = function() {}"));
testSame(inIIFE("const f = function() {}"));
testSame(inIIFE("foo.bar = function() {}"));
testSame(inIIFE("class Foo { bar() {} }"));
testSame(inIIFE("var Foo = class { bar() {} };"));
testSame("myArray.forEach(function(elem) { alert(elem); });");
testSame(
"""
Polymer({
is: 'example-elem',
/** @return {null} */
someMethod: function() {},
});
""");
testSame(
"""
Polymer({
is: 'example-elem',
/** @return {null} */
someMethod() {},
});
""");
}
@Test
public void testMissingJsDoc_noWarningIfNotTopLevelAndNoParams() {
testSame(
"""
describe('a karma test', function() {
/** @ngInject */
var helperFunction = function($compile, $rootScope) {};
})
""");
}
@Test
public void testMissingJsDoc_noWarning_wizConstructorAndDeps() {
// Exempt Wiz controller constructor and deps() method because Wiz automatically adds JSDoc
// NOTE(lharker@): right now this does not warn because of b/124061048: the behavior is correct
// but for the wrong reason.
testSame(
"""
goog.module('a.b.MyController');
class MyController extends SomeParentController {
static deps() { return {model: 0}; }
constructor({model}) {}
}
registerController(MY_CONTROLLER, MyController);
""");
}
@Test
public void testMissingJsDoc_noWarningOnTestFunctions() {
testSame("function testSomeFunctionality() {}");
testSame("var testSomeFunctionality = function() {};");
testSame("let testSomeFunctionality = function() {};");
testSame("window.testSomeFunctionality = function() {};");
testSame("const testSomeFunctionality = function() {};");
testSame("function setUp() {}");
testSame("function tearDown() {}");
testSame("var setUp = function() {};");
testSame("var tearDown = function() {};");
}
@Test
public void testMissingJsDoc_noWarningOnTestMethods() {
testSame("class MyClass { testSomeFunctionality() {} }");
testSame("goog.module('mod'); class MyClass { testSomeFunctionality() {} }");
testSame("a.b.c = class { testSomeFunctionality() {} }");
testSame("class MyClass { setUp() {} }");
testSame("class MyClass { tearDown() {} }");
}
@Test
public void testMissingJsDoc_noWarningOnTestFunctions_withES6Modules() {
testSame("export function testSomeFunctionality() {}");
}
@Test
public void testMissingJsDoc_noWarningOnEmptyConstructor() {
testSame("class Foo { constructor() {} }");
}
@Test
public void testMissingJsDoc_noWarningOnEmptyConstructor_withES6Modules() {
testSame("export class Foo { constructor() {} }");
}
@Test
public void testMissingJsDoc_googModule() {
testWarning("goog.module('a.b.c'); function f() {}", MISSING_JSDOC);
testWarning("goog.module('a.b.c'); var f = function() {};", MISSING_JSDOC);
// TODO(b/124061048): these should also warn for missing JSDoc
testSame("goog.module('a.b.c'); class Foo { constructor(x) {} }");
testSame("goog.module('a.b.c'); class Foo { someMethod() {} }");
}
@Test
public void testMissingJsDoc_ES6Module01() {
testWarning("export default abc; function f() {}", MISSING_JSDOC);
}
@Test
public void testMissingJsDoc_ES6Module02() {
testWarning("export default abc; var f = function() {};", MISSING_JSDOC);
}
@Test
public void testMissingJsDoc_ES6Module03() {
testWarning("export function f() {};", MISSING_JSDOC);
}
@Test
public void testMissingJsDoc_ES6Module04() {
testWarning("export default function () {}", MISSING_JSDOC);
}
@Test
public void testMissingJsDoc_ES6Module05() {
testWarning("export default (foo) => { alert(foo); }", MISSING_JSDOC);
}
@Test
public void testMissingJsDoc_googModule_noWarning() {
testSame("goog.module('a.b.c'); /** @type {function()} */ function f() {}");
testSame("goog.module('a.b.c'); /** @type {function()} */ var f = function() {};");
// No param constructors do not require JSDoc
testSame("goog.module('a.b.c'); class Foo { constructor() {} }");
}
@Test
public void testMissingJsDoc_ES6Module_noWarning01() {
testSame("export default abc; /** @type {function()} */ function f() {}");
}
@Test
public void testMissingJsDoc_ES6Module_noWarning02() {
testSame("export default abc; /** @type {function()} */ var f = function() {};");
}
private static String inIIFE(String js) {
return "(function() {\n" + js + "\n})()";
}
@Test
public void testMissingParam_noWarning() {
testSame(
"""
/**
* @param {string} x
* @param {string} y
*/
function f(x, y) {}
""");
testSame(
"""
/**
* @param {string=} x
*/
function f(x = 1) {}
""");
testSame(
"""
/**
* @param {number=} x
* @param {number=} y
* @param {number=} z
*/
function f(x = 1, y = 2, z = 3) {}
""");
testSame(
"""
/**
* @param {...string} args
*/
function f(...args) {}
""");
testSame(
"""
(function() {
myArray.forEach(function(elem) { alert(elem); });
})();
""");
testSame(
"""
(function() {
myArray.forEach(elem => alert(elem));
})();
""");
testSame("/** @type {function(number)} */ function f(x) {}");
testSame("function f(/** string */ inlineArg) {}");
testSame("/** @export */ function f(/** string */ inlineArg) {}");
testSame("class Foo { constructor(/** string */ inlineArg) {} }");
testSame("class Foo { method(/** string */ inlineArg) {} }");
testSame("/** @export */ class Foo { constructor(/** string */ inlineArg) {} }");
testSame("class Foo { /** @export */ method(/** string */ inlineArg) {} }");
}
@Test
public void testMissingParam_noWarning_withES6Modules() {
testSame("export class Foo { /** @export */ method(/** string */ inlineArg) {} }");
}
@Test
public void testMissingParam() {
testWarning(
"""
/**
* @param {string} x
// No @param for y.
*/
function f(x, y) {}
""",
WRONG_NUMBER_OF_PARAMS);
testWarning(
"""
/**
* @param {string} x
*/
function f(x = 1) {}
""",
OPTIONAL_PARAM_NOT_MARKED_OPTIONAL);
testWarning(
"""
/**
* @param {string} x
// No @param for y.
*/
function f(x, y = 1) {}
""",
WRONG_NUMBER_OF_PARAMS);
testWarning("function f(/** string */ x, y) {}", MISSING_PARAMETER_JSDOC);
testWarning("function f(x, /** string */ y) {}", MISSING_PARAMETER_JSDOC);
testWarning("function /** string */ f(x) {}", MISSING_PARAMETER_JSDOC);
testWarning(inIIFE("function f(/** string */ x, y) {}"), MISSING_PARAMETER_JSDOC);
testWarning(inIIFE("function f(x, /** string */ y) {}"), MISSING_PARAMETER_JSDOC);
testWarning(inIIFE("function /** string */ f(x) {}"), MISSING_PARAMETER_JSDOC);
}
@Test
public void testMissingParam_withES6Modules01() {
testWarning(
"""
export
/**
* @param {string} x
// No @param for y.
*/
function f(x, y) {}
""",
WRONG_NUMBER_OF_PARAMS);
}
@Test
public void testMissingParam_withES6Modules02() {
testWarning(
"export /** @param {string} x */ function f(x = 1) {}", OPTIONAL_PARAM_NOT_MARKED_OPTIONAL);
}
@Test
public void testMissingParam_withES6Modules03() {
testWarning("export function f(/** string */ x, y) {}", MISSING_PARAMETER_JSDOC);
}
@Test
public void testMissingParamWithDestructuringPattern() {
testWarning(
"""
/**
* @param {string} namedParam
* @return {void}
*/
function f(namedParam, {destructuring:pattern}) {
}
""",
WRONG_NUMBER_OF_PARAMS);
testWarning(
"""
/**
* @param {string} namedParam
* @return {void}
*/
function f({destructuring:pattern}, namedParam) {
}
""",
WRONG_NUMBER_OF_PARAMS);
testWarning(
"""
/**
* @param {string} namedParam
* @return {void}
*/
function f(namedParam, [pattern]) {
}
""",
WRONG_NUMBER_OF_PARAMS);
testWarning(
"""
/**
* @param {string} namedParam
* @return {void}
*/
function f([pattern], namedParam) {
}
""",
WRONG_NUMBER_OF_PARAMS);
testWarning(
"""
/**
* @param {{
* a: (string|undefined),
* b: (number|undefined),
* c: (boolean|undefined)
* }} obj
*/
function create({a = 'hello', b = 8, c = false} = {}) {}
""",
OPTIONAL_PARAM_NOT_MARKED_OPTIONAL);
// Same as above except there's an '=' to indicate that it's optional.
testSame(
"""
/**
* @param {{
* a: (string|undefined),
* b: (number|undefined),
* c: (boolean|undefined)
* }=} obj
*/
function create({a = 'hello', b = 8, c = false} = {}) {}
""");
}
@Test
public void testMissingParam_defaultValue() {
testWarning(
"""
/**
* @param {string} x
// No @param for y.
*/
function f(x, y = 0) {}
""",
WRONG_NUMBER_OF_PARAMS);
testWarning(
"""
/**
* @param {string} x
* @param {number} y
*/
function f(x, y = 0) {}
""",
OPTIONAL_PARAM_NOT_MARKED_OPTIONAL);
testNoWarning(
"""
/**
* @param {string} x
* @param {number=} y
*/
function f(x, y = 0) {}
""");
testWarning("function f(/** string */ x, y = 0) {}", MISSING_PARAMETER_JSDOC);
testWarning(
"function f(/** string */ x, /** number */ y = 0) {}", OPTIONAL_PARAM_NOT_MARKED_OPTIONAL);
testNoWarning("function f(/** string */ x, /** number= */ y = 0) {}");
}
@Test
public void testMissingParam_rest() {
testWarning(
"""
/**
* @param {string} x
// No @param for y.
*/
function f(x, ...y) {}
""",
WRONG_NUMBER_OF_PARAMS);
testNoWarning(
"""
/**
* @param {string} x
* @param {...number} y
*/
function f(x, ...y) {}
""");
testWarning("function f(/** string */ x, ...y) {}", MISSING_PARAMETER_JSDOC);
testNoWarning("function f(/** string */ x, /** ...number */ ...y) {}");
}
@Test
public void testInvalidMissingParamWithDestructuringPattern_withES6Modules01() {
testWarning(
"""
export
/**
* @param {string} namedParam
* @return {void}
*/
function f(namedParam, {destructuring:pattern}) {
}
""",
WRONG_NUMBER_OF_PARAMS);
}
@Test
public void testInvalidMissingParamWithDestructuringPattern_withES6Modules02() {
testWarning(
"""
export
/**
* @param {{
* a: (string|undefined),
* b: (number|undefined),
* c: (boolean|undefined)
* }} obj
*/
function create({a = 'hello', b = 8, c = false} = {}) {}
""",
OPTIONAL_PARAM_NOT_MARKED_OPTIONAL);
}
@Test
public void testValidMissingParamWithDestructuringPattern_withES6Modules() {
testSame(
"""
export
/**
* @param {{
* a: (string|undefined),
* b: (number|undefined),
* c: (boolean|undefined)
* }=} obj
*/
function create({a = 'hello', b = 8, c = false} = {}) {}
""");
}
@Test
public void testMissingParamWithDestructuringPatternWithDefault() {
testWarning(
"""
/**
* @param {string} namedParam
* @return {void}
*/
function f(namedParam, {destructuring:pattern} = defaultValue) {
}
""",
WRONG_NUMBER_OF_PARAMS);
testWarning(
"""
/**
* @param {string} namedParam
* @return {void}
*/
function f(namedParam, [pattern] = defaultValue) {
}
""",
WRONG_NUMBER_OF_PARAMS);
}
@Test
public void testMissingParamWithDestructuringPatternWithDefault_withES6Modules() {
testWarning(
"""
export
/**
* @param {string} namedParam
* @return {void}
*/
function f(namedParam, {destructuring:pattern} = defaultValue) {
}
""",
WRONG_NUMBER_OF_PARAMS);
}
@Test
public void testParamWithNoTypeInfo() {
testSame(
"""
/**
* @param x A param with no type information.
*/
function f(x) { }
""");
}
@Test
public void testParamWithNoTypeInfo_optional() {
testWarning(
"""
/**
* @param x A param with no type information.
*/
function f(x = undefined) { }
""",
OPTIONAL_PARAM_NOT_MARKED_OPTIONAL);
}
@Test
public void testParamWithNoTypeInfo_withES6Modules() {
testSame(
"""
export
/**
* @param x A param with no type information.
*/
function f(x) { }
""");
}
@Test
public void testMissingPrivate_noWarningWithClosureConvention() {
codingConvention = new ClosureCodingConvention();
testSame(
"""
/**
* @return {number}
* @private
*/
X.prototype.foo = function() { return 0; }
""");
}
@Test
public void testMissingPrivate() {
testSame(
"""
/**
* @return {number}
* @private
*/
X.prototype.foo_ = function() { return 0; }
""");
testSame(
"""
/**
* @type {number}
* @private
*/
X.prototype.foo_ = 0;
""");
testSame(
"""
/** @type {number} */
X.prototype['@some_special_property'] = 0;
""");
}
@Test
public void testNoPrivateWarningsWithSuppressions() {
testNoWarning(
"""
goog.module('mod');
class Foo {
constructor() {
/** @private {number} */
this.n_;
/** @private {number} */
this.m_;
}
setUp() {
/** @suppress {checkTypes} */
this.n_ = ' not a number ';
this.m_ = 1;
}
testSomething() {
alert(this.n_ + this.m_);
}
}
""");
}
@Test
public void testMissingPrivate_dontWarnOnObjectLiteral() {
testSame(
"""
var obj = {
/** @return {number} */
foo_() { return 0; }
}
""");
}
@Test
public void testMissingPrivate_dontWarnOnObjectLiteral_withES6Modules() {
testSame("export var obj = { /** @return {number} */ foo_() { return 0; } }");
}
@Test
public void testOptionalArgs() {
testSame(
"""
/**
* @param {number=} n
*/
function f(n) {}
""");
testSame(
"""
/**
* @param {number} opt_n
*/
function f(opt_n) {}
""",
OPTIONAL_PARAM_NOT_MARKED_OPTIONAL);
testSame(
"""
/**
* @param {number=} opt_n
*/
function f(opt_n) {}
""");
}
@Test
public void testValidOptionalArgs_withES6Modules() {
testSame("export /** @param {number=} n */ function f(n) {}");
}
@Test
public void testInvalidOptionalArgs_withES6Modules() {
testSame(
"export /** @param {number} opt_n */ function f(opt_n) {}",
OPTIONAL_PARAM_NOT_MARKED_OPTIONAL);
}
@Test
public void testParamsOutOfOrder() {
testWarning(
"""
/**
* @param {?} second
* @param {?} first
*/
function f(first, second) {}
""",
INCORRECT_PARAM_NAME);
}
@Test
public void testParamsOutOfOrder_withES6Modules() {
testWarning(
"""
export
/**
* @param {?} second
* @param {?} first
*/
function f(first, second) {}
""",
INCORRECT_PARAM_NAME);
}
@Test
public void testMixedStyles() {
testWarning(
"""
/**
* @param {?} first
* @param {string} second
*/
function f(first, /** string */ second) {}
""",
MIXED_PARAM_JSDOC_STYLES);
}
@Test
public void testMixedStyles_withES6Modules() {
testWarning(
"""
export
/**
* @param {?} first
* @param {string} second
*/
function f(first, /** string */ second) {}
""",
MIXED_PARAM_JSDOC_STYLES);
}
@Test
public void testDestructuring() {
testSame(
"""
/**
* @param {{x: number, y: number}} point
*/
function getDistanceFromZero({x, y}) {}
""");
testSame("function getDistanceFromZero(/** {x: number, y: number} */ {x, y}) {}");
}
@Test
public void testDestructuring_withES6Modules() {
testSame("export function getDistanceFromZero(/** {x: number, y: number} */ {x, y}) {}");
}
@Test
public void testMissingReturn_functionStatement_noWarning() {
testSame("/** @param {number} x */ function f(x) {}");
testSame("/** @constructor */ function f() {}");
testSame("/** @param {number} x */ function f(x) { function bar() { return x; } }");
testSame("/** @param {number} x */ function f(x) { return; }");
testSame("/** @param {number} x\n * @return {number} */ function f(x) { return x; }");
testSame("/** @param {number} x */ function /** number */ f(x) { return x; }");
}
@Test
public void testMissingReturn_functionStatement_noWarning_withES6Modules() {
testSame("export /** @param {number} x */ function f(x) {}");
}
@Test
public void testMissingReturn_assign_noWarning() {
testSame("/** @param {number} x */ f = function(x) {}");
testSame("/** @constructor */ f = function() {}");
testSame("/** @param {number} x */ f = function(x) { function bar() { return x; } }");
testSame("/** @param {number} x */ f = function(x) { return; }");
testSame("/** @param {number} x\n * @return {number} */ f = function(x) { return x; }");
}
@Test
public void testMissingParamOrReturn_warnOnOverrideMethodsAndFields() {
testWarning(
"""
/**
* @override
* @param {string} x
*/
Foo.Bar = function(x) { return x; }
""", // function assigned to a field
MISSING_RETURN_JSDOC);
testWarning(
"""
/**
* @override
* @param {string} x
*/
function f(x) { return x; }
""",
MISSING_RETURN_JSDOC);
testWarning(
"""
/**
* @override
* @return {string}
*/
Foo.Bar = function(x) { return x; }
""",
MISSING_PARAMETER_JSDOC);
testWarning(
"""
/**
* @override
* @param {string} x
*/
Foo.bar = function(x, y) {}
""",
WRONG_NUMBER_OF_PARAMS);
// also test `@inheritDoc` annotations
testWarning(
"""
/**
* @inheritDoc
* @return {string} x
*/
function f(x) { return x; }
""",
MISSING_PARAMETER_JSDOC);
testWarning(
"""
/**
* @inheritDoc
* @return {string} x
*/
Foo.Bar = function(x) { return x; }
""", // assigned to a field
MISSING_PARAMETER_JSDOC);
// inline param type
testNoWarning(
"""
/**
* @override
* @return {string}
*/
var f = function(/** string */ x) { return x; }
""");
}
@Test
public void testMissingReturn_var_noWarning() {
testSame("/** @param {number} x */ var f = function(x) {}");
testSame("/** @constructor */ var f = function() {}");
testSame("/** @param {number} x */ var f = function(x) { function bar() { return x; } }");
testSame("/** @param {number} x */ var f = function(x) { return; }");
testSame("/** @param {number} x\n * @return {number} */ var f = function(x) { return x; }");
testSame("/** @const {function(number): number} */ var f = function(x) { return x; }");
}
@Test
public void testMissingReturn_constructor_noWarning() {
testSame("/** @constructor */ var C = function() { return null; }");
}
@Test
public void testMissingReturn_class_constructor_noWarning() {
testSame("class C { /** @param {Array} x */ constructor(x) { return x; } }");
}
@Test
public void testMissingReturn_var_noWarning_withES6Modules() {
testSame("export /** @param {number} x */ var f = function(x) {}");
}
@Test
public void testMissingReturn_functionStatement() {
testWarning("/** @param {number} x */ function f(x) { return x; }", MISSING_RETURN_JSDOC);
testWarning(
"""
/** @param {number} x */
function f(x) {
/** @param {number} x */
function bar(x) {
return x;
}
}
""",
MISSING_RETURN_JSDOC);
testWarning(
"/** @param {number} x */ function f(x) { if (true) { return x; } }", MISSING_RETURN_JSDOC);
testWarning(
"/** @param {number} x @constructor */ function f(x) { return x; }", MISSING_RETURN_JSDOC);
}
@Test
public void testMissingReturn_functionStatement_withES6Modules() {
testWarning(
"export /** @param {number} x */ function f(x) { return x; }", MISSING_RETURN_JSDOC);
}
@Test
public void testMissingReturn_assign() {
testWarning("/** @param {number} x */ f = function(x) { return x; }", MISSING_RETURN_JSDOC);
testWarning(
"""
/** @param {number} x */
function f(x) {
/** @param {number} x */
bar = function(x) {
return x;
}
}
""",
MISSING_RETURN_JSDOC);
testWarning(
"/** @param {number} x */ f = function(x) { if (true) { return x; } }",
MISSING_RETURN_JSDOC);
testWarning(
"/** @param {number} x @constructor */ f = function(x) { return x; }",
MISSING_RETURN_JSDOC);
}
@Test
public void testMissingReturn_assign_withES6Modules() {
testWarning(
"""
/** @param {number} x */
export
function f(x) {
/** @param {number} x */
bar = function(x) {
return x;
}
}
""",
MISSING_RETURN_JSDOC);
}
@Test
public void testMissingReturn_var() {
testWarning("/** @param {number} x */ var f = function(x) { return x; }", MISSING_RETURN_JSDOC);
testWarning(
"""
/** @param {number} x */
function f(x) {
/** @param {number} x */
var bar = function(x) {
return x;
}
}
""",
MISSING_RETURN_JSDOC);
testWarning(
"/** @param {number} x */ var f = function(x) { if (true) { return x; } }",
MISSING_RETURN_JSDOC);
testWarning(
"/** @param {number} x @constructor */ var f = function(x) { return x; }",
MISSING_RETURN_JSDOC);
}
@Test
public void testMissingReturn_var_withES6Modules() {
testWarning(
"export /** @param {number} x */ var f = function(x) { return x; }", MISSING_RETURN_JSDOC);
}
@Test
public void testExternsAnnotation() {
test(externs("function Example() {}"), srcs(""), warning(EXTERNS_FILES_SHOULD_BE_ANNOTATED));
testSame(
externs(
"/** @fileoverview Some super cool externs.\n * @externs\n */ function Example() {}"),
srcs(""));
testSame(
externs(
"""
/** @fileoverview Some super cool externs.
* @externs
*/
/** @constructor */ function Example() {}
/** @param {number} x */ function example2(x) {}
"""),
srcs(""));
test(
srcs(
"/** @fileoverview Some externs.\n * @externs\n */ /** @const */ var example;",
"/** @fileoverview Some more.\n * @externs\n */ /** @const */ var example2;"),
expected(new String[] {}));
}
@Test
public void testInvalidExternsAnnotation_withES6Modules() {
test(
externs("export function Example() {}"),
srcs(""),
warning(EXTERNS_FILES_SHOULD_BE_ANNOTATED));
}
@Test
public void testValidExternsAnnotation_withES6Modules() {
testSame(
externs(
"""
export /** @fileoverview Some super cool externs.
* @externs
*/
function Example() {}
"""),
srcs(""));
}
@Test
public void testValidLicenseCommentWithoutExterns() {
testSame(
srcs(
"""
/**
* @license
* Copyright 2024 Google LLC
*/
function Example() {}
"""));
}
@Test
public void testValidLicenseCommentAndExterns_separateBlocks() {
testSame(
externs(
"""
/**
* @license
* Copyright 2024 Google LLC
*/
/**
* @fileoverview Some super cool externs.
* @externs
*/
function Example() {}
"""),
srcs(""));
}
@Test
public void testValidLicenseCommentAndExterns_sameBlocks() {
testSame(
externs(
"""
/**
* @fileoverview Some super cool externs.
* @externs
* @license
* Copyright 2024 Google LLC
*/
function Example() {}
"""),
srcs(""));
}
@Test
public void testInvalidLicenseComment_containsExterns() {
test(
srcs(
"""
/**
* @license
* Copyright 2024 Google LLC
* @externs - oh no, this tag is treated as part of the @license!
*/
function Example() {}
"""),
warning(LICENSE_CONTAINS_AT_EXTERNS));
}
  @Test
  public void testAtSignCodeDetectedWhenPresent() {
    // {@code ...} in a JSDoc description should be flagged in favor of markdown backticks.
    testWarning(
        "/** blah blah {@code blah blah} blah blah */ function f() {}",
        PREFER_BACKTICKS_TO_AT_SIGN_CODE);
  }
}
|
googleapis/google-cloud-java | 38,210 | java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/ListDiscoveryConfigsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2/dlp.proto
// Protobuf Java Version: 3.25.8
package com.google.privacy.dlp.v2;
/**
*
*
* <pre>
* Response message for ListDiscoveryConfigs.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.ListDiscoveryConfigsResponse}
*/
public final class ListDiscoveryConfigsResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.ListDiscoveryConfigsResponse)
    ListDiscoveryConfigsResponseOrBuilder {
  // Java serialization version pinned to 0; protobuf messages round-trip via their
  // own wire format, not Java serialization state.
  private static final long serialVersionUID = 0L;
  // Use ListDiscoveryConfigsResponse.newBuilder() to construct.
  private ListDiscoveryConfigsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ListDiscoveryConfigsResponse() {
    discoveryConfigs_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Invoked reflectively by the protobuf runtime; the marker parameter only
  // distinguishes this instantiation path from public constructors.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListDiscoveryConfigsResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_ListDiscoveryConfigsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_ListDiscoveryConfigsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse.class,
            com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse.Builder.class);
  }
  public static final int DISCOVERY_CONFIGS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.privacy.dlp.v2.DiscoveryConfig> discoveryConfigs_;
  /**
   *
   *
   * <pre>
   * List of configs, up to page_size in ListDiscoveryConfigsRequest.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.privacy.dlp.v2.DiscoveryConfig> getDiscoveryConfigsList() {
    return discoveryConfigs_;
  }
  /**
   *
   *
   * <pre>
   * List of configs, up to page_size in ListDiscoveryConfigsRequest.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.privacy.dlp.v2.DiscoveryConfigOrBuilder>
      getDiscoveryConfigsOrBuilderList() {
    return discoveryConfigs_;
  }
  /**
   *
   *
   * <pre>
   * List of configs, up to page_size in ListDiscoveryConfigsRequest.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
   */
  @java.lang.Override
  public int getDiscoveryConfigsCount() {
    return discoveryConfigs_.size();
  }
  /**
   *
   *
   * <pre>
   * List of configs, up to page_size in ListDiscoveryConfigsRequest.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.DiscoveryConfig getDiscoveryConfigs(int index) {
    return discoveryConfigs_.get(index);
  }
  /**
   *
   *
   * <pre>
   * List of configs, up to page_size in ListDiscoveryConfigsRequest.
   * </pre>
   *
   * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.DiscoveryConfigOrBuilder getDiscoveryConfigsOrBuilder(
      int index) {
    return discoveryConfigs_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a java.lang.String or a ByteString; decoded lazily and cached
  // by getNextPageToken()/getNextPageTokenBytes(). volatile makes the cached
  // swap safe to publish across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * If the next page is available then this value is the next page token to be
   * used in the following ListDiscoveryConfigs request.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field arrived off the wire as a ByteString; decode once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * If the next page is available then this value is the next page token to be
   * used in the following ListDiscoveryConfigs request.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Mirror of getNextPageToken(): encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Cached isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < discoveryConfigs_.size(); i++) {
      output.writeMessage(1, discoveryConfigs_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < discoveryConfigs_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, discoveryConfigs_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse)) {
      return super.equals(obj);
    }
    com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse other =
        (com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse) obj;
    if (!getDiscoveryConfigsList().equals(other.getDiscoveryConfigsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard protoc hash scheme: fold each populated field in by field number
    // with fixed prime multipliers; result is memoized (0 means "not computed").
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getDiscoveryConfigsCount() > 0) {
      hash = (37 * hash) + DISCOVERY_CONFIGS_FIELD_NUMBER;
      hash = (53 * hash) + getDiscoveryConfigsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response message for ListDiscoveryConfigs.
   * </pre>
   *
   * Protobuf type {@code google.privacy.dlp.v2.ListDiscoveryConfigsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.ListDiscoveryConfigsResponse)
      com.google.privacy.dlp.v2.ListDiscoveryConfigsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_ListDiscoveryConfigsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_ListDiscoveryConfigsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse.class,
              com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse.Builder.class);
    }
    // Construct using com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (discoveryConfigsBuilder_ == null) {
        // List mode: drop back to the shared immutable empty list.
        discoveryConfigs_ = java.util.Collections.emptyList();
      } else {
        // Builder mode: the nested builder owns the elements.
        discoveryConfigs_ = null;
        discoveryConfigsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_ListDiscoveryConfigsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse getDefaultInstanceForType() {
      return com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse build() {
      com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse buildPartial() {
      com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse result =
          new com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse result) {
      if (discoveryConfigsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          // Freeze the mutable list before handing it to the immutable message;
          // clearing the bit means later builder mutations must copy first.
          discoveryConfigs_ = java.util.Collections.unmodifiableList(discoveryConfigs_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.discoveryConfigs_ = discoveryConfigs_;
      } else {
        result.discoveryConfigs_ = discoveryConfigsBuilder_.build();
      }
    }
    private void buildPartial0(com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse) {
        return mergeFrom((com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse other) {
      if (other == com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse.getDefaultInstance())
        return this;
      if (discoveryConfigsBuilder_ == null) {
        if (!other.discoveryConfigs_.isEmpty()) {
          if (discoveryConfigs_.isEmpty()) {
            // Share the other message's (immutable) list; keep the mutable bit clear
            // so any later mutation copies it first.
            discoveryConfigs_ = other.discoveryConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureDiscoveryConfigsIsMutable();
            discoveryConfigs_.addAll(other.discoveryConfigs_);
          }
          onChanged();
        }
      } else {
        if (!other.discoveryConfigs_.isEmpty()) {
          if (discoveryConfigsBuilder_.isEmpty()) {
            // Builder is empty: cheaper to drop it and share the other list directly,
            // re-creating the builder only if the runtime always uses field builders.
            discoveryConfigsBuilder_.dispose();
            discoveryConfigsBuilder_ = null;
            discoveryConfigs_ = other.discoveryConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
            discoveryConfigsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getDiscoveryConfigsFieldBuilder()
                    : null;
          } else {
            discoveryConfigsBuilder_.addAllMessages(other.discoveryConfigs_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Wire tags: 10 = field 1 (discovery_configs, length-delimited),
          // 18 = field 2 (next_page_token, length-delimited).
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.privacy.dlp.v2.DiscoveryConfig m =
                    input.readMessage(
                        com.google.privacy.dlp.v2.DiscoveryConfig.parser(), extensionRegistry);
                if (discoveryConfigsBuilder_ == null) {
                  ensureDiscoveryConfigsIsMutable();
                  discoveryConfigs_.add(m);
                } else {
                  discoveryConfigsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001: this builder owns a mutable discoveryConfigs_ list.
    // Bit 0x00000002: nextPageToken_ has been explicitly set.
    private int bitField0_;
    private java.util.List<com.google.privacy.dlp.v2.DiscoveryConfig> discoveryConfigs_ =
        java.util.Collections.emptyList();
    // Copy-on-write guard: clone the possibly-shared list before the first in-place mutation.
    private void ensureDiscoveryConfigsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        discoveryConfigs_ =
            new java.util.ArrayList<com.google.privacy.dlp.v2.DiscoveryConfig>(discoveryConfigs_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.privacy.dlp.v2.DiscoveryConfig,
            com.google.privacy.dlp.v2.DiscoveryConfig.Builder,
            com.google.privacy.dlp.v2.DiscoveryConfigOrBuilder>
        discoveryConfigsBuilder_;
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public java.util.List<com.google.privacy.dlp.v2.DiscoveryConfig> getDiscoveryConfigsList() {
      if (discoveryConfigsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(discoveryConfigs_);
      } else {
        return discoveryConfigsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public int getDiscoveryConfigsCount() {
      if (discoveryConfigsBuilder_ == null) {
        return discoveryConfigs_.size();
      } else {
        return discoveryConfigsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public com.google.privacy.dlp.v2.DiscoveryConfig getDiscoveryConfigs(int index) {
      if (discoveryConfigsBuilder_ == null) {
        return discoveryConfigs_.get(index);
      } else {
        return discoveryConfigsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public Builder setDiscoveryConfigs(int index, com.google.privacy.dlp.v2.DiscoveryConfig value) {
      if (discoveryConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDiscoveryConfigsIsMutable();
        discoveryConfigs_.set(index, value);
        onChanged();
      } else {
        discoveryConfigsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public Builder setDiscoveryConfigs(
        int index, com.google.privacy.dlp.v2.DiscoveryConfig.Builder builderForValue) {
      if (discoveryConfigsBuilder_ == null) {
        ensureDiscoveryConfigsIsMutable();
        discoveryConfigs_.set(index, builderForValue.build());
        onChanged();
      } else {
        discoveryConfigsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public Builder addDiscoveryConfigs(com.google.privacy.dlp.v2.DiscoveryConfig value) {
      if (discoveryConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDiscoveryConfigsIsMutable();
        discoveryConfigs_.add(value);
        onChanged();
      } else {
        discoveryConfigsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public Builder addDiscoveryConfigs(int index, com.google.privacy.dlp.v2.DiscoveryConfig value) {
      if (discoveryConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDiscoveryConfigsIsMutable();
        discoveryConfigs_.add(index, value);
        onChanged();
      } else {
        discoveryConfigsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public Builder addDiscoveryConfigs(
        com.google.privacy.dlp.v2.DiscoveryConfig.Builder builderForValue) {
      if (discoveryConfigsBuilder_ == null) {
        ensureDiscoveryConfigsIsMutable();
        discoveryConfigs_.add(builderForValue.build());
        onChanged();
      } else {
        discoveryConfigsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public Builder addDiscoveryConfigs(
        int index, com.google.privacy.dlp.v2.DiscoveryConfig.Builder builderForValue) {
      if (discoveryConfigsBuilder_ == null) {
        ensureDiscoveryConfigsIsMutable();
        discoveryConfigs_.add(index, builderForValue.build());
        onChanged();
      } else {
        discoveryConfigsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public Builder addAllDiscoveryConfigs(
        java.lang.Iterable<? extends com.google.privacy.dlp.v2.DiscoveryConfig> values) {
      if (discoveryConfigsBuilder_ == null) {
        ensureDiscoveryConfigsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, discoveryConfigs_);
        onChanged();
      } else {
        discoveryConfigsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public Builder clearDiscoveryConfigs() {
      if (discoveryConfigsBuilder_ == null) {
        discoveryConfigs_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        discoveryConfigsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public Builder removeDiscoveryConfigs(int index) {
      if (discoveryConfigsBuilder_ == null) {
        ensureDiscoveryConfigsIsMutable();
        discoveryConfigs_.remove(index);
        onChanged();
      } else {
        discoveryConfigsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public com.google.privacy.dlp.v2.DiscoveryConfig.Builder getDiscoveryConfigsBuilder(int index) {
      return getDiscoveryConfigsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public com.google.privacy.dlp.v2.DiscoveryConfigOrBuilder getDiscoveryConfigsOrBuilder(
        int index) {
      if (discoveryConfigsBuilder_ == null) {
        return discoveryConfigs_.get(index);
      } else {
        return discoveryConfigsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public java.util.List<? extends com.google.privacy.dlp.v2.DiscoveryConfigOrBuilder>
        getDiscoveryConfigsOrBuilderList() {
      if (discoveryConfigsBuilder_ != null) {
        return discoveryConfigsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(discoveryConfigs_);
      }
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public com.google.privacy.dlp.v2.DiscoveryConfig.Builder addDiscoveryConfigsBuilder() {
      return getDiscoveryConfigsFieldBuilder()
          .addBuilder(com.google.privacy.dlp.v2.DiscoveryConfig.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public com.google.privacy.dlp.v2.DiscoveryConfig.Builder addDiscoveryConfigsBuilder(int index) {
      return getDiscoveryConfigsFieldBuilder()
          .addBuilder(index, com.google.privacy.dlp.v2.DiscoveryConfig.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of configs, up to page_size in ListDiscoveryConfigsRequest.
     * </pre>
     *
     * <code>repeated .google.privacy.dlp.v2.DiscoveryConfig discovery_configs = 1;</code>
     */
    public java.util.List<com.google.privacy.dlp.v2.DiscoveryConfig.Builder>
        getDiscoveryConfigsBuilderList() {
      return getDiscoveryConfigsFieldBuilder().getBuilderList();
    }
    // Lazily switches the repeated field from plain-list mode to builder mode;
    // once created, the field builder owns the elements and discoveryConfigs_ is null.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.privacy.dlp.v2.DiscoveryConfig,
            com.google.privacy.dlp.v2.DiscoveryConfig.Builder,
            com.google.privacy.dlp.v2.DiscoveryConfigOrBuilder>
        getDiscoveryConfigsFieldBuilder() {
      if (discoveryConfigsBuilder_ == null) {
        discoveryConfigsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.privacy.dlp.v2.DiscoveryConfig,
                com.google.privacy.dlp.v2.DiscoveryConfig.Builder,
                com.google.privacy.dlp.v2.DiscoveryConfigOrBuilder>(
                discoveryConfigs_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        discoveryConfigs_ = null;
      }
      return discoveryConfigsBuilder_;
    }
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * If the next page is available then this value is the next page token to be
     * used in the following ListDiscoveryConfigs request.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * If the next page is available then this value is the next page token to be
     * used in the following ListDiscoveryConfigs request.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * If the next page is available then this value is the next page token to be
     * used in the following ListDiscoveryConfigs request.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * If the next page is available then this value is the next page token to be
     * used in the following ListDiscoveryConfigs request.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * If the next page is available then this value is the next page token to be
     * used in the following ListDiscoveryConfigs request.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.ListDiscoveryConfigsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListDiscoveryConfigsResponse)
  private static final com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse();
  }
  public static com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stream parser used by all parseFrom overloads; delegates to Builder.mergeFrom
  // and attaches the partially-built message to any parse failure.
  private static final com.google.protobuf.Parser<ListDiscoveryConfigsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListDiscoveryConfigsResponse>() {
        @java.lang.Override
        public ListDiscoveryConfigsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListDiscoveryConfigsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListDiscoveryConfigsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.privacy.dlp.v2.ListDiscoveryConfigsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
openjdk/jdk8 | 38,275 | jdk/src/share/classes/sun/tools/jconsole/Plotter.java | /*
* Copyright (c) 2004, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.tools.jconsole;
import java.awt.*;
import java.awt.event.*;
import java.beans.*;
import java.io.*;
import java.lang.reflect.Array;
import java.util.*;
import javax.accessibility.*;
import javax.swing.*;
import javax.swing.border.*;
import javax.swing.filechooser.*;
import javax.swing.filechooser.FileFilter;
import com.sun.tools.jconsole.JConsoleContext;
import static sun.tools.jconsole.Formatter.*;
import static sun.tools.jconsole.ProxyClient.*;
@SuppressWarnings("serial")
public class Plotter extends JComponent
implements Accessible, ActionListener, PropertyChangeListener {
    /** Measurement unit used when formatting axis tick labels and current values. */
    public static enum Unit {
        NONE, BYTES, PERCENT
    }
    // Labels for the time-range popup menu; kept in sync index-for-index
    // with rangeValues below.
    static final String[] rangeNames = {
        Messages.ONE_MIN,
        Messages.FIVE_MIN,
        Messages.TEN_MIN,
        Messages.THIRTY_MIN,
        Messages.ONE_HOUR,
        Messages.TWO_HOURS,
        Messages.THREE_HOURS,
        Messages.SIX_HOURS,
        Messages.TWELVE_HOURS,
        Messages.ONE_DAY,
        Messages.SEVEN_DAYS,
        Messages.ONE_MONTH,
        Messages.THREE_MONTHS,
        Messages.SIX_MONTHS,
        Messages.ONE_YEAR,
        Messages.ALL
    };
    // View ranges in minutes; -1 means "show all collected data".
    static final int[] rangeValues = {
        1,
        5,
        10,
        30,
        1 * 60,
        2 * 60,
        3 * 60,
        6 * 60,
        12 * 60,
        1 * 24 * 60,
        7 * 24 * 60,
        1 * 31 * 24 * 60,
        3 * 31 * 24 * 60,
        6 * 31 * 24 * 60,
        366 * 24 * 60,
        -1
    };
    // Time constants, all in milliseconds.
    final static long SECOND = 1000;
    final static long MINUTE = 60 * SECOND;
    final static long HOUR = 60 * MINUTE;
    final static long DAY = 24 * HOUR;
    final static Color bgColor = new Color(250, 250, 250);
    final static Color defaultColor = Color.blue.darker();
    // Growth step for the value/timestamp arrays (see extendArray).
    final static int ARRAY_SIZE_INCREMENT = 4000;
    // Lazily created, shared stroke for "transition" segments (see getDashedStroke).
    private static Stroke dashedStroke;
    private TimeStamps times = new TimeStamps();
    private ArrayList<Sequence> seqs = new ArrayList<Sequence>();
    // Popup-menu widgets, created lazily in getComponentPopupMenu().
    private JPopupMenu popupMenu;
    private JMenu timeRangeMenu;
    private JRadioButtonMenuItem[] menuRBs;
    private JMenuItem saveAsMI;
    private JFileChooser saveFC;
    private int viewRange = -1; // Minutes (value <= 0 means full range)
    private Unit unit;
    private int decimals;
    private double decimalsMultiplier;
    private Border border = null;
    // Scratch rectangle reused by paintComponent to avoid per-paint allocation.
    private Rectangle r = new Rectangle(1, 1, 1, 1);
    private Font smallFont = null;
    // Initial margins, may be recalculated as needed
    private int topMargin = 10;
    private int bottomMargin = 45;
    private int leftMargin = 65;
    private int rightMargin = 70;
    private final boolean displayLegend;
    /** Creates a plotter with no unit and no decimal shift. */
    public Plotter() {
        this(Unit.NONE, 0);
    }
    /** Creates a plotter with the given unit and no decimal shift. */
    public Plotter(Unit unit) {
        this(unit, 0);
    }
    public Plotter(Unit unit, int decimals) {
        this(unit,decimals,true);
    }
    // Note: If decimals > 0 then values must be decimally shifted left
    // that many places, i.e. multiplied by Math.pow(10.0, decimals).
    /**
     * Full constructor.
     *
     * @param unit          how values are labelled (none, bytes, percent)
     * @param decimals      number of implied decimal places in supplied values
     * @param displayLegend whether sequence names are drawn next to current values
     */
    public Plotter(Unit unit, int decimals, boolean displayLegend) {
        this.displayLegend = displayLegend;
        setUnit(unit);
        setDecimals(decimals);
        enableEvents(AWTEvent.MOUSE_EVENT_MASK);
        // Clicking the plot moves keyboard focus to the enclosing PlotterPanel
        // (needed for accessibility / keyboard navigation).
        addMouseListener(new MouseAdapter() {
            @Override
            public void mousePressed(MouseEvent e) {
                if (getParent() instanceof PlotterPanel) {
                    getParent().requestFocusInWindow();
                }
            }
        });
    }
    /** Sets the unit used for labelling values. */
    public void setUnit(Unit unit) {
        this.unit = unit;
    }
    /** Sets the implied decimal places; caches the matching power-of-ten divisor. */
    public void setDecimals(int decimals) {
        this.decimals = decimals;
        this.decimalsMultiplier = Math.pow(10.0, decimals);
    }
public void createSequence(String key, String name, Color color, boolean isPlotted) {
Sequence seq = getSequence(key);
if (seq == null) {
seq = new Sequence(key);
}
seq.name = name;
seq.color = (color != null) ? color : defaultColor;
seq.isPlotted = isPlotted;
seqs.add(seq);
}
public void setUseDashedTransitions(String key, boolean b) {
Sequence seq = getSequence(key);
if (seq != null) {
seq.transitionStroke = b ? getDashedStroke() : null;
}
}
public void setIsPlotted(String key, boolean isPlotted) {
Sequence seq = getSequence(key);
if (seq != null) {
seq.isPlotted = isPlotted;
}
}
    // Note: If decimals > 0 then values must be decimally shifted left
    // that many places, i.e. multiplied by Math.pow(10.0, decimals).
    /**
     * Records one sample per registered sequence at the given timestamp and
     * schedules a repaint. {@code values} must supply exactly one value per
     * sequence, in registration order.
     */
    public synchronized void addValues(long time, long... values) {
        assert (values.length == seqs.size());
        times.add(time);
        for (int i = 0; i < values.length; i++) {
            seqs.get(i).add(values[i]);
        }
        repaint();
    }
private Sequence getSequence(String key) {
for (Sequence seq : seqs) {
if (seq.key.equals(key)) {
return seq;
}
}
return null;
}
    /**
     * @return the displayed time range in minutes, or -1 for all data
     */
    public int getViewRange() {
        return viewRange;
    }
    /**
     * @param minutes the displayed time range in minutes, or -1 to display all data
     */
    public void setViewRange(int minutes) {
        if (minutes != viewRange) {
            int oldValue = viewRange;
            viewRange = minutes;
            /* Do not i18n this string */
            firePropertyChange("viewRange", oldValue, viewRange);
            // Keep the popup menu's radio buttons in sync with the new range.
            if (popupMenu != null) {
                for (int i = 0; i < menuRBs.length; i++) {
                    if (rangeValues[i] == viewRange) {
                        menuRBs[i].setSelected(true);
                        break;
                    }
                }
            }
            repaint();
        }
    }
    /**
     * Lazily builds the context menu: a "time range" submenu of radio buttons
     * (one per entry in rangeNames/rangeValues) plus a "Save data as..." item.
     * All items dispatch to {@link #actionPerformed}.
     */
    @Override
    public JPopupMenu getComponentPopupMenu() {
        if (popupMenu == null) {
            popupMenu = new JPopupMenu(Messages.CHART_COLON);
            timeRangeMenu = new JMenu(Messages.PLOTTER_TIME_RANGE_MENU);
            timeRangeMenu.setMnemonic(Resources.getMnemonicInt(Messages.PLOTTER_TIME_RANGE_MENU));
            popupMenu.add(timeRangeMenu);
            menuRBs = new JRadioButtonMenuItem[rangeNames.length];
            ButtonGroup rbGroup = new ButtonGroup();
            for (int i = 0; i < rangeNames.length; i++) {
                menuRBs[i] = new JRadioButtonMenuItem(rangeNames[i]);
                rbGroup.add(menuRBs[i]);
                menuRBs[i].addActionListener(this);
                // Pre-select the button matching the current view range.
                if (viewRange == rangeValues[i]) {
                    menuRBs[i].setSelected(true);
                }
                timeRangeMenu.add(menuRBs[i]);
            }
            popupMenu.addSeparator();
            saveAsMI = new JMenuItem(Messages.PLOTTER_SAVE_AS_MENU_ITEM);
            saveAsMI.setMnemonic(Resources.getMnemonicInt(Messages.PLOTTER_SAVE_AS_MENU_ITEM));
            saveAsMI.addActionListener(this);
            popupMenu.add(saveAsMI);
        }
        return popupMenu;
    }
    /**
     * Handles popup-menu actions: "Save as" opens the export dialog; any
     * other source must be one of the time-range radio buttons, whose menu
     * index maps directly into rangeValues.
     */
    public void actionPerformed(ActionEvent ev) {
        JComponent src = (JComponent)ev.getSource();
        if (src == saveAsMI) {
            saveAs();
        } else {
            int index = timeRangeMenu.getPopupMenu().getComponentIndex(src);
            setViewRange(rangeValues[index]);
        }
    }
private void saveAs() {
if (saveFC == null) {
saveFC = new SaveDataFileChooser();
}
int ret = saveFC.showSaveDialog(this);
if (ret == JFileChooser.APPROVE_OPTION) {
saveDataToFile(saveFC.getSelectedFile());
}
}
private void saveDataToFile(File file) {
try {
PrintStream out = new PrintStream(new FileOutputStream(file));
// Print header line
out.print("Time");
for (Sequence seq : seqs) {
out.print(","+seq.name);
}
out.println();
// Print data lines
if (seqs.size() > 0 && seqs.get(0).size > 0) {
for (int i = 0; i < seqs.get(0).size; i++) {
double excelTime = toExcelTime(times.time(i));
out.print(String.format(Locale.ENGLISH, "%.6f", excelTime));
for (Sequence seq : seqs) {
out.print("," + getFormattedValue(seq.value(i), false));
}
out.println();
}
}
out.close();
JOptionPane.showMessageDialog(this,
Resources.format(Messages.FILE_CHOOSER_SAVED_FILE,
file.getAbsolutePath(),
file.length()));
} catch (IOException ex) {
String msg = ex.getLocalizedMessage();
String path = file.getAbsolutePath();
if (msg.startsWith(path)) {
msg = msg.substring(path.length()).trim();
}
JOptionPane.showMessageDialog(this,
Resources.format(Messages.FILE_CHOOSER_SAVE_FAILED_MESSAGE,
path,
msg),
Messages.FILE_CHOOSER_SAVE_FAILED_TITLE,
JOptionPane.ERROR_MESSAGE);
}
}
    /**
     * Paints the whole chart: bevelled frame, value axis with normalized
     * ticks, time axis with clock/date labels, the data curves (newest-first
     * sequence drawn on top), and per-sequence current-value labels.
     * May widen leftMargin/rightMargin and return early, relying on the
     * repaint it schedules to redraw with the corrected margins.
     */
    @Override
    public void paintComponent(Graphics g) {
        super.paintComponent(g);
        int width = getWidth()-rightMargin-leftMargin-10;
        int height = getHeight()-topMargin-bottomMargin;
        if (width <= 0 || height <= 0) {
            // not enough room to paint anything
            return;
        }
        Color oldColor = g.getColor();
        Font oldFont = g.getFont();
        Color fg = getForeground();
        Color bg = getBackground();
        // Heuristic: near-white background -> draw value labels in sequence
        // colors, otherwise fall back to the foreground color for contrast.
        boolean bgIsLight = (bg.getRed() > 200 &&
                             bg.getGreen() > 200 &&
                             bg.getBlue() > 200);
        ((Graphics2D)g).setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                                         RenderingHints.VALUE_ANTIALIAS_ON);
        if (smallFont == null) {
            smallFont = oldFont.deriveFont(9.0F);
        }
        r.x = leftMargin - 5;
        r.y = topMargin - 8;
        r.width = getWidth()-leftMargin-rightMargin;
        r.height = getHeight()-topMargin-bottomMargin+16;
        if (border == null) {
            // By setting colors here, we avoid recalculating them
            // over and over.
            border = new BevelBorder(BevelBorder.LOWERED,
                                     getBackground().brighter().brighter(),
                                     getBackground().brighter(),
                                     getBackground().darker().darker(),
                                     getBackground().darker());
        }
        border.paintBorder(this, g, r.x, r.y, r.width, r.height);
        // Fill background color
        g.setColor(bgColor);
        g.fillRect(r.x+2, r.y+2, r.width-4, r.height-4);
        g.setColor(oldColor);
        long tMin = Long.MAX_VALUE;
        long tMax = Long.MIN_VALUE;
        long vMin = Long.MAX_VALUE;
        long vMax = 1;
        int w = getWidth()-rightMargin-leftMargin-10;
        int h = getHeight()-topMargin-bottomMargin;
        if (times.size > 1) {
            tMin = Math.min(tMin, times.time(0));
            tMax = Math.max(tMax, times.time(times.size-1));
        }
        long viewRangeMS;
        if (viewRange > 0) {
            viewRangeMS = viewRange * MINUTE;
        } else {
            // Display full time range, but no less than a minute
            viewRangeMS = Math.max(tMax - tMin, 1 * MINUTE);
        }
        // Calculate min/max values
        for (Sequence seq : seqs) {
            if (seq.size > 0) {
                for (int i = 0; i < seq.size; i++) {
                    if (seq.size == 1 || times.time(i) >= tMax - viewRangeMS) {
                        long val = seq.value(i);
                        // Long.MIN_VALUE samples are disconnect markers; skip them.
                        if (val > Long.MIN_VALUE) {
                            vMax = Math.max(vMax, val);
                            vMin = Math.min(vMin, val);
                        }
                    }
                }
            } else {
                vMin = 0L;
            }
            if (unit == Unit.BYTES || !seq.isPlotted) {
                // We'll scale only to the first (main) value set.
                // TODO: Use a separate property for this.
                break;
            }
        }
        // Normalize scale
        vMax = normalizeMax(vMax);
        if (vMin > 0) {
            if (vMax / vMin > 4) {
                vMin = 0;
            } else {
                vMin = normalizeMin(vMin);
            }
        }
        g.setColor(fg);
        // Axes
        // Draw vertical axis
        int x = leftMargin - 18;
        int y = topMargin;
        FontMetrics fm = g.getFontMetrics();
        g.drawLine(x, y, x, y+h);
        // Pick a tick count that divides evenly into vMax's leading digit.
        int n = 5;
        if ((""+vMax).startsWith("2")) {
            n = 4;
        } else if ((""+vMax).startsWith("3")) {
            n = 6;
        } else if ((""+vMax).startsWith("4")) {
            n = 4;
        } else if ((""+vMax).startsWith("6")) {
            n = 6;
        } else if ((""+vMax).startsWith("7")) {
            n = 7;
        } else if ((""+vMax).startsWith("8")) {
            n = 8;
        } else if ((""+vMax).startsWith("9")) {
            n = 3;
        }
        // Ticks
        ArrayList<Long> tickValues = new ArrayList<Long>();
        tickValues.add(vMin);
        for (int i = 0; i < n; i++) {
            long v = i * vMax / n;
            if (v > vMin) {
                tickValues.add(v);
            }
        }
        tickValues.add(vMax);
        n = tickValues.size();
        String[] tickStrings = new String[n];
        for (int i = 0; i < n; i++) {
            long v = tickValues.get(i);
            tickStrings[i] = getSizeString(v, vMax);
        }
        // Trim trailing decimal zeroes.
        if (decimals > 0) {
            boolean trimLast = true;
            boolean removedDecimalPoint = false;
            do {
                // Strip one trailing char from every label per pass, but only
                // while all labels end in '0' or '.'; stop once a decimal
                // point has been removed.
                for (String str : tickStrings) {
                    if (!(str.endsWith("0") || str.endsWith("."))) {
                        trimLast = false;
                        break;
                    }
                }
                if (trimLast) {
                    if (tickStrings[0].endsWith(".")) {
                        removedDecimalPoint = true;
                    }
                    for (int i = 0; i < n; i++) {
                        String str = tickStrings[i];
                        tickStrings[i] = str.substring(0, str.length()-1);
                    }
                }
            } while (trimLast && !removedDecimalPoint);
        }
        // Draw ticks
        int lastY = Integer.MAX_VALUE;
        for (int i = 0; i < n; i++) {
            long v = tickValues.get(i);
            y = topMargin+h-(int)(h * (v-vMin) / (vMax-vMin));
            g.drawLine(x-2, y, x+2, y);
            String s = tickStrings[i];
            if (unit == Unit.PERCENT) {
                s += "%";
            }
            int sx = x-6-fm.stringWidth(s);
            // Skip labels that would overlap the previous one vertically.
            if (y < lastY-13) {
                if (checkLeftMargin(sx)) {
                    // Wait for next repaint
                    return;
                }
                g.drawString(s, sx, y+4);
            }
            // Draw horizontal grid line
            g.setColor(Color.lightGray);
            g.drawLine(r.x + 4, y, r.x + r.width - 4, y);
            g.setColor(fg);
            lastY = y;
        }
        // Draw horizontal axis
        x = leftMargin;
        y = topMargin + h + 15;
        g.drawLine(x, y, x+w, y);
        long t1 = tMax;
        if (t1 <= 0L) {
            // No data yet, so draw current time
            t1 = System.currentTimeMillis();
        }
        long tz = timeDF.getTimeZone().getOffset(t1);
        long tickInterval = calculateTickInterval(w, 40, viewRangeMS);
        if (tickInterval > 3 * HOUR) {
            tickInterval = calculateTickInterval(w, 80, viewRangeMS);
        }
        // Align ticks to local-time interval boundaries within the view range.
        long t0 = tickInterval - (t1 - viewRangeMS + tz) % tickInterval;
        while (t0 < viewRangeMS) {
            x = leftMargin + (int)(w * t0 / viewRangeMS);
            g.drawLine(x, y-2, x, y+2);
            long t = t1 - viewRangeMS + t0;
            String str = formatClockTime(t);
            g.drawString(str, x, y+16);
            //if (tickInterval > (1 * HOUR) && t % (1 * DAY) == 0) {
            // At local midnight also print the date under the clock label.
            if ((t + tz) % (1 * DAY) == 0) {
                str = formatDate(t);
                g.drawString(str, x, y+27);
            }
            // Draw vertical grid line
            g.setColor(Color.lightGray);
            g.drawLine(x, topMargin, x, topMargin + h);
            g.setColor(fg);
            t0 += tickInterval;
        }
        // Plot values
        int start = 0;
        int nValues = 0;
        int nLists = seqs.size();
        if (nLists > 0) {
            nValues = seqs.get(0).size;
        }
        if (nValues == 0) {
            g.setColor(oldColor);
            return;
        } else {
            Sequence seq = seqs.get(0);
            // Find starting point
            for (int p = 0; p < seq.size; p++) {
                if (times.time(p) >= tMax - viewRangeMS) {
                    start = p;
                    break;
                }
            }
        }
        //Optimization: collapse plot of more than four values per pixel
        int pointsPerPixel = (nValues - start) / w;
        if (pointsPerPixel < 4) {
            pointsPerPixel = 1;
        }
        // Draw graphs
        // Loop backwards over sequences because the first needs to be painted on top
        for (int i = nLists-1; i >= 0; i--) {
            int x0 = leftMargin;
            int y0 = topMargin + h + 1;
            Sequence seq = seqs.get(i);
            if (seq.isPlotted && seq.size > 0) {
                // Paint twice, with white and with color
                for (int pass = 0; pass < 2; pass++) {
                    g.setColor((pass == 0) ? Color.white : seq.color);
                    int x1 = -1;
                    long v1 = -1;
                    for (int p = start; p < nValues; p += pointsPerPixel) {
                        // Make sure we get the last value
                        if (pointsPerPixel > 1 && p >= nValues - pointsPerPixel) {
                            p = nValues - 1;
                        }
                        int x2 = (int)(w * (times.time(p)-(t1-viewRangeMS)) / viewRangeMS);
                        long v2 = seq.value(p);
                        if (v2 >= vMin && v2 <= vMax) {
                            int y2 = (int)(h * (v2 -vMin) / (vMax-vMin));
                            if (x1 >= 0 && v1 >= vMin && v1 <= vMax) {
                                int y1 = (int)(h * (v1-vMin) / (vMax-vMin));
                                if (y1 == y2) {
                                    // fillrect is much faster
                                    g.fillRect(x0+x1, y0-y1-pass, x2-x1, 1);
                                } else {
                                    Graphics2D g2d = (Graphics2D)g;
                                    Stroke oldStroke = null;
                                    if (seq.transitionStroke != null) {
                                        oldStroke = g2d.getStroke();
                                        g2d.setStroke(seq.transitionStroke);
                                    }
                                    g.drawLine(x0+x1, y0-y1-pass, x0+x2, y0-y2-pass);
                                    if (oldStroke != null) {
                                        g2d.setStroke(oldStroke);
                                    }
                                }
                            }
                        }
                        x1 = x2;
                        v1 = v2;
                    }
                }
                // Current value
                long v = seq.value(seq.size - 1);
                if (v >= vMin && v <= vMax) {
                    if (bgIsLight) {
                        g.setColor(seq.color);
                    } else {
                        g.setColor(fg);
                    }
                    x = r.x + r.width + 2;
                    y = topMargin+h-(int)(h * (v-vMin) / (vMax-vMin));
                    // a small triangle/arrow
                    g.fillPolygon(new int[] { x+2, x+6, x+6 },
                                  new int[] { y, y+3, y-3 },
                                  3);
                }
                g.setColor(fg);
            }
        }
        // Lay out the current-value labels so they never overlap; each
        // sequence claims a vertical "slot" (see getValueStringSlot).
        int[] valueStringSlots = new int[nLists];
        for (int i = 0; i < nLists; i++) valueStringSlots[i] = -1;
        for (int i = 0; i < nLists; i++) {
            Sequence seq = seqs.get(i);
            if (seq.isPlotted && seq.size > 0) {
                // Draw current value
                // TODO: collapse values if pointsPerPixel >= 4
                long v = seq.value(seq.size - 1);
                if (v >= vMin && v <= vMax) {
                    x = r.x + r.width + 2;
                    y = topMargin+h-(int)(h * (v-vMin) / (vMax-vMin));
                    int y2 = getValueStringSlot(valueStringSlots, y, 2*10, i);
                    g.setFont(smallFont);
                    if (bgIsLight) {
                        g.setColor(seq.color);
                    } else {
                        g.setColor(fg);
                    }
                    String curValue = getFormattedValue(v, true);
                    if (unit == Unit.PERCENT) {
                        curValue += "%";
                    }
                    int valWidth = fm.stringWidth(curValue);
                    String legend = (displayLegend?seq.name:"");
                    int legendWidth = fm.stringWidth(legend);
                    if (checkRightMargin(valWidth) || checkRightMargin(legendWidth)) {
                        // Wait for next repaint
                        return;
                    }
                    g.drawString(legend , x + 17, Math.min(topMargin+h, y2 + 3 - 10));
                    g.drawString(curValue, x + 17, Math.min(topMargin+h + 10, y2 + 3));
                    // Maybe draw a short line to value
                    if (y2 > y + 3) {
                        g.drawLine(x + 9, y + 2, x + 14, y2);
                    } else if (y2 < y - 3) {
                        g.drawLine(x + 9, y - 2, x + 14, y2);
                    }
                }
                g.setFont(oldFont);
                g.setColor(fg);
            }
        }
        g.setColor(oldColor);
    }
private boolean checkLeftMargin(int x) {
// Make sure leftMargin has at least 2 pixels over
if (x < 2) {
leftMargin += (2 - x);
// Repaint from top (above any cell renderers)
SwingUtilities.getWindowAncestor(this).repaint();
return true;
}
return false;
}
private boolean checkRightMargin(int w) {
// Make sure rightMargin has at least 2 pixels over
if (w + 2 > rightMargin) {
rightMargin = (w + 2);
// Repaint from top (above any cell renderers)
SwingUtilities.getWindowAncestor(this).repaint();
return true;
}
return false;
}
    /**
     * Finds a free vertical "slot" of height {@code h} near the desired
     * y-coordinate for sequence {@code i}'s value label, recursing away from
     * any already-claimed slot until no collision remains, then records and
     * returns the chosen y.
     * NOTE(review): termination relies on slots being sparse relative to the
     * plot height; with many overlapping labels the recursion could bounce
     * between neighbours — confirm against worst-case sequence counts.
     */
    private int getValueStringSlot(int[] slots, int y, int h, int i) {
        for (int s = 0; s < slots.length; s++) {
            if (slots[s] >= y && slots[s] < y + h) {
                // collide below us
                if (slots[s] > h) {
                    return getValueStringSlot(slots, slots[s]-h, h, i);
                } else {
                    return getValueStringSlot(slots, slots[s]+h, h, i);
                }
            } else if (y >= h && slots[s] > y - h && slots[s] < y) {
                // collide above us
                return getValueStringSlot(slots, slots[s]+h, h, i);
            }
        }
        slots[i] = y;
        return y;
    }
private long calculateTickInterval(int w, int hGap, long viewRangeMS) {
long tickInterval = viewRangeMS * hGap / w;
if (tickInterval < 1 * MINUTE) {
tickInterval = 1 * MINUTE;
} else if (tickInterval < 5 * MINUTE) {
tickInterval = 5 * MINUTE;
} else if (tickInterval < 10 * MINUTE) {
tickInterval = 10 * MINUTE;
} else if (tickInterval < 30 * MINUTE) {
tickInterval = 30 * MINUTE;
} else if (tickInterval < 1 * HOUR) {
tickInterval = 1 * HOUR;
} else if (tickInterval < 3 * HOUR) {
tickInterval = 3 * HOUR;
} else if (tickInterval < 6 * HOUR) {
tickInterval = 6 * HOUR;
} else if (tickInterval < 12 * HOUR) {
tickInterval = 12 * HOUR;
} else if (tickInterval < 1 * DAY) {
tickInterval = 1 * DAY;
} else {
tickInterval = normalizeMax(tickInterval / DAY) * DAY;
}
return tickInterval;
}
    /**
     * Rounds {@code l} down to one significant digit (e.g. 4700 -> 4000).
     * NOTE(review): assumes l >= 1 — l == 0 would make multiple zero and
     * divide by zero below; the only caller (paintComponent) guards vMin > 0.
     */
    private long normalizeMin(long l) {
        int exp = (int)Math.log10((double)l);
        long multiple = (long)Math.pow(10.0, exp);
        int i = (int)(l / multiple);
        return i * multiple;
    }
    /**
     * Rounds {@code l} up to the next multiple of its leading power of ten
     * (e.g. 4700 -> 5000). Assumes l >= 1 (vMax is initialized to 1).
     */
    private long normalizeMax(long l) {
        int exp = (int)Math.log10((double)l);
        long multiple = (long)Math.pow(10.0, exp);
        int i = (int)(l / multiple);
        l = (i+1)*multiple;
        return l;
    }
    /**
     * Formats a raw sample for display, undoing the implied decimal shift
     * when decimals > 0 and optionally grouping digits.
     * NOTE(review): String.format here uses the default locale, so decimal
     * and grouping separators are locale-dependent; saveDataToFile() writes
     * these strings into CSV — confirm whether Locale.ENGLISH is wanted there.
     */
    private String getFormattedValue(long v, boolean groupDigits) {
        String str;
        String fmt = "%";
        if (groupDigits) {
            fmt += ",";
        }
        if (decimals > 0) {
            fmt += "." + decimals + "f";
            str = String.format(fmt, v / decimalsMultiplier);
        } else {
            fmt += "d";
            str = String.format(fmt, v);
        }
        return str;
    }
private String getSizeString(long v, long vMax) {
String s;
if (unit == Unit.BYTES && decimals == 0) {
s = formatBytes(v, vMax);
} else {
s = getFormattedValue(v, true);
}
return s;
}
    /**
     * Lazily creates the shared dashed stroke used for transition segments.
     * Synchronized because plotters on different windows may race to
     * initialize the static instance.
     */
    private static synchronized Stroke getDashedStroke() {
        if (dashedStroke == null) {
            dashedStroke = new BasicStroke(1.0f,
                                           BasicStroke.CAP_BUTT,
                                           BasicStroke.JOIN_MITER,
                                           10.0f,
                                           new float[] { 2.0f, 3.0f },
                                           0.0f);
        }
        return dashedStroke;
    }
private static Object extendArray(Object a1) {
int n = Array.getLength(a1);
Object a2 =
Array.newInstance(a1.getClass().getComponentType(),
n + ARRAY_SIZE_INCREMENT);
System.arraycopy(a1, 0, a2, 0, n);
return a2;
}
    /** Compact, append-only store of millisecond timestamps. */
    private static class TimeStamps {
        // Time stamps (long) are split into offsets (long) and a
        // series of times from the offsets (int). A new offset is
        // stored when the time value doesn't fit in an int
        // (approx every 24 days). An array of indices is used to
        // define the starting point for each offset in the times
        // array.
        long[] offsets = new long[0];
        int[] indices = new int[0];
        int[] rtimes = new int[ARRAY_SIZE_INCREMENT];
        // Number of stored timestamps
        int size = 0;
        /**
         * Returns the time stamp for index i
         */
        public long time(int i) {
            long offset = 0;
            // Scan offsets newest-first; the first whose start index covers i
            // is the base for rtimes[i].
            for (int j = indices.length - 1; j >= 0; j--) {
                if (i >= indices[j]) {
                    offset = offsets[j];
                    break;
                }
            }
            return offset + rtimes[i];
        }
        // Appends a timestamp; assumes timestamps are added in increasing order.
        public void add(long time) {
            // May need to store a new time offset
            int n = offsets.length;
            if (n == 0 || time - offsets[n - 1] > Integer.MAX_VALUE) {
                // Grow offset and indices arrays and store new offset
                offsets = Arrays.copyOf(offsets, n + 1);
                offsets[n] = time;
                indices = Arrays.copyOf(indices, n + 1);
                indices[n] = size;
            }
            // May need to extend the array size
            if (rtimes.length == size) {
                rtimes = (int[])extendArray(rtimes);
            }
            // Store the time
            rtimes[size] = (int)(time - offsets[offsets.length - 1]);
            size++;
        }
    }
private static class Sequence {
String key;
String name;
Color color;
boolean isPlotted;
Stroke transitionStroke = null;
// Values are stored in an int[] if all values will fit,
// otherwise in a long[]. An int can represent up to 2 GB.
// Use a random start size, so all arrays won't need to
// be grown during the same update interval
Object values =
new byte[ARRAY_SIZE_INCREMENT + (int)(Math.random() * 100)];
// Number of stored values
int size = 0;
public Sequence(String key) {
this.key = key;
}
/**
* Returns the value at index i
*/
public long value(int i) {
return Array.getLong(values, i);
}
public void add(long value) {
// May need to switch to a larger array type
if ((values instanceof byte[] ||
values instanceof short[] ||
values instanceof int[]) &&
value > Integer.MAX_VALUE) {
long[] la = new long[Array.getLength(values)];
for (int i = 0; i < size; i++) {
la[i] = Array.getLong(values, i);
}
values = la;
} else if ((values instanceof byte[] ||
values instanceof short[]) &&
value > Short.MAX_VALUE) {
int[] ia = new int[Array.getLength(values)];
for (int i = 0; i < size; i++) {
ia[i] = Array.getInt(values, i);
}
values = ia;
} else if (values instanceof byte[] &&
value > Byte.MAX_VALUE) {
short[] sa = new short[Array.getLength(values)];
for (int i = 0; i < size; i++) {
sa[i] = Array.getShort(values, i);
}
values = sa;
}
// May need to extend the array size
if (Array.getLength(values) == size) {
values = extendArray(values);
}
// Store the value
if (values instanceof long[]) {
((long[])values)[size] = value;
} else if (values instanceof int[]) {
((int[])values)[size] = (int)value;
} else if (values instanceof short[]) {
((short[])values)[size] = (short)value;
} else {
((byte[])values)[size] = (byte)value;
}
size++;
}
}
    // Can be overridden by subclasses
    long getValue() {
        return 0;
    }
    /** Returns the timestamp of the most recent sample. */
    long getLastTimeStamp() {
        return times.time(times.size - 1);
    }
    /** Returns the most recent value of the given sequence, or 0 if it has no data. */
    long getLastValue(String key) {
        Sequence seq = getSequence(key);
        return (seq != null && seq.size > 0) ? seq.value(seq.size - 1) : 0L;
    }
// Called on EDT
public void propertyChange(PropertyChangeEvent ev) {
String prop = ev.getPropertyName();
if (prop == JConsoleContext.CONNECTION_STATE_PROPERTY) {
ConnectionState newState = (ConnectionState)ev.getNewValue();
switch (newState) {
case DISCONNECTED:
synchronized(this) {
long time = System.currentTimeMillis();
times.add(time);
for (Sequence seq : seqs) {
seq.add(Long.MIN_VALUE);
}
}
break;
}
}
}
    /**
     * File chooser for CSV export: appends the .csv extension when missing
     * and asks for confirmation before overwriting an existing file.
     */
    private static class SaveDataFileChooser extends JFileChooser {
        private static final long serialVersionUID = -5182890922369369669L;
        SaveDataFileChooser() {
            setFileFilter(new FileNameExtensionFilter("CSV file", "csv"));
        }
        @Override
        public void approveSelection() {
            File file = getSelectedFile();
            if (file != null) {
                FileFilter filter = getFileFilter();
                if (filter != null && filter instanceof FileNameExtensionFilter) {
                    String[] extensions =
                        ((FileNameExtensionFilter)filter).getExtensions();
                    // Accept any of the filter's extensions, case-insensitively.
                    boolean goodExt = false;
                    for (String ext : extensions) {
                        if (file.getName().toLowerCase().endsWith("." + ext.toLowerCase())) {
                            goodExt = true;
                            break;
                        }
                    }
                    if (!goodExt) {
                        // No recognized extension: append the filter's first one.
                        file = new File(file.getParent(),
                                        file.getName() + "." + extensions[0]);
                    }
                }
                if (file.exists()) {
                    String okStr = Messages.FILE_CHOOSER_FILE_EXISTS_OK_OPTION;
                    String cancelStr = Messages.FILE_CHOOSER_FILE_EXISTS_CANCEL_OPTION;
                    int ret =
                        JOptionPane.showOptionDialog(this,
                                                     Resources.format(Messages.FILE_CHOOSER_FILE_EXISTS_MESSAGE,
                                                                      file.getName()),
                                                     Messages.FILE_CHOOSER_FILE_EXISTS_TITLE,
                                                     JOptionPane.OK_CANCEL_OPTION,
                                                     JOptionPane.WARNING_MESSAGE,
                                                     null,
                                                     new Object[] { okStr, cancelStr },
                                                     okStr);
                    // Abort the approval entirely if the user declines to overwrite.
                    if (ret != JOptionPane.OK_OPTION) {
                        return;
                    }
                }
                setSelectedFile(file);
            }
            super.approveSelection();
        }
    }
@Override
public AccessibleContext getAccessibleContext() {
if (accessibleContext == null) {
accessibleContext = new AccessiblePlotter();
}
return accessibleContext;
}
protected class AccessiblePlotter extends AccessibleJComponent {
private static final long serialVersionUID = -3847205410473510922L;
protected AccessiblePlotter() {
setAccessibleName(Messages.PLOTTER_ACCESSIBLE_NAME);
}
@Override
public String getAccessibleName() {
String name = super.getAccessibleName();
if (seqs.size() > 0 && seqs.get(0).size > 0) {
String keyValueList = "";
for (Sequence seq : seqs) {
if (seq.isPlotted) {
String value = "null";
if (seq.size > 0) {
if (unit == Unit.BYTES) {
value = Resources.format(Messages.SIZE_BYTES, seq.value(seq.size - 1));
} else {
value =
getFormattedValue(seq.value(seq.size - 1), false) +
((unit == Unit.PERCENT) ? "%" : "");
}
}
// Assume format string ends with newline
keyValueList +=
Resources.format(Messages.PLOTTER_ACCESSIBLE_NAME_KEY_AND_VALUE,
seq.key, value);
}
}
name += "\n" + keyValueList + ".";
} else {
name += "\n" + Messages.PLOTTER_ACCESSIBLE_NAME_NO_DATA;
}
return name;
}
@Override
public AccessibleRole getAccessibleRole() {
return AccessibleRole.CANVAS;
}
}
}
|
googleapis/google-cloud-java | 38,068 | java-retail/google-cloud-retail/src/test/java/com/google/cloud/retail/v2beta/CatalogServiceClientHttpJsonTest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.retail.v2beta;
import static com.google.cloud.retail.v2beta.CatalogServiceClient.ListCatalogsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.testing.MockHttpService;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ApiException;
import com.google.api.gax.rpc.ApiExceptionFactory;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.testing.FakeStatusCode;
import com.google.cloud.retail.v2beta.stub.HttpJsonCatalogServiceStub;
import com.google.common.collect.Lists;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Timestamp;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class CatalogServiceClientHttpJsonTest {
private static MockHttpService mockService;
private static CatalogServiceClient client;
  // Spins up an in-process mock HTTP transport and a client wired to it;
  // both are shared by every test in this class.
  @BeforeClass
  public static void startStaticServer() throws IOException {
    mockService =
        new MockHttpService(
            HttpJsonCatalogServiceStub.getMethodDescriptors(),
            CatalogServiceSettings.getDefaultEndpoint());
    CatalogServiceSettings settings =
        CatalogServiceSettings.newHttpJsonBuilder()
            .setTransportChannelProvider(
                CatalogServiceSettings.defaultHttpJsonTransportProviderBuilder()
                    .setHttpTransport(mockService)
                    .build())
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = CatalogServiceClient.create(settings);
  }
  // Releases the shared client once all tests have run.
  @AfterClass
  public static void stopServer() {
    client.close();
  }
  @Before
  public void setUp() {}
  // Clear queued responses/exceptions so tests stay independent.
  @After
  public void tearDown() throws Exception {
    mockService.reset();
  }
  // Happy path: listCatalogs(LocationName) returns the single mocked catalog
  // and sends exactly one request with the expected client header.
  @Test
  public void listCatalogsTest() throws Exception {
    Catalog responsesElement = Catalog.newBuilder().build();
    ListCatalogsResponse expectedResponse =
        ListCatalogsResponse.newBuilder()
            .setNextPageToken("")
            .addAllCatalogs(Arrays.asList(responsesElement))
            .build();
    mockService.addResponse(expectedResponse);
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    ListCatalogsPagedResponse pagedListResponse = client.listCatalogs(parent);
    List<Catalog> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getCatalogsList().get(0), resources.get(0));
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Error path: a queued INVALID_ARGUMENT is surfaced as InvalidArgumentException.
  @Test
  public void listCatalogsExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      client.listCatalogs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Same happy path as listCatalogsTest, using the String-parent overload.
  @Test
  public void listCatalogsTest2() throws Exception {
    Catalog responsesElement = Catalog.newBuilder().build();
    ListCatalogsResponse expectedResponse =
        ListCatalogsResponse.newBuilder()
            .setNextPageToken("")
            .addAllCatalogs(Arrays.asList(responsesElement))
            .build();
    mockService.addResponse(expectedResponse);
    String parent = "projects/project-5833/locations/location-5833";
    ListCatalogsPagedResponse pagedListResponse = client.listCatalogs(parent);
    List<Catalog> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getCatalogsList().get(0), resources.get(0));
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  // Error path for the String-parent overload.
  @Test
  public void listCatalogsExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String parent = "projects/project-5833/locations/location-5833";
      client.listCatalogs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Happy path: updateCatalog echoes the mocked catalog and sends one request.
  @Test
  public void updateCatalogTest() throws Exception {
    Catalog expectedResponse =
        Catalog.newBuilder()
            .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .setDisplayName("displayName1714148973")
            .setProductLevelConfig(ProductLevelConfig.newBuilder().build())
            .setMerchantCenterLinkingConfig(MerchantCenterLinkingConfig.newBuilder().build())
            .build();
    mockService.addResponse(expectedResponse);
    Catalog catalog =
        Catalog.newBuilder()
            .setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .setDisplayName("displayName1714148973")
            .setProductLevelConfig(ProductLevelConfig.newBuilder().build())
            .setMerchantCenterLinkingConfig(MerchantCenterLinkingConfig.newBuilder().build())
            .build();
    FieldMask updateMask = FieldMask.newBuilder().build();
    Catalog actualResponse = client.updateCatalog(catalog, updateMask);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
@Test
public void updateCatalogExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
Catalog catalog =
Catalog.newBuilder()
.setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setDisplayName("displayName1714148973")
.setProductLevelConfig(ProductLevelConfig.newBuilder().build())
.setMerchantCenterLinkingConfig(MerchantCenterLinkingConfig.newBuilder().build())
.build();
FieldMask updateMask = FieldMask.newBuilder().build();
client.updateCatalog(catalog, updateMask);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /**
   * Verifies that setDefaultBranch(CatalogName) completes successfully against the
   * mocked Empty response, issues exactly one HTTP request, and sends the expected
   * x-goog-api-client header.
   */
  @Test
  public void setDefaultBranchTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockService.addResponse(expectedResponse);
    CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
    client.setDefaultBranch(catalog);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that setDefaultBranch(CatalogName) surfaces a server-side
   * INVALID_ARGUMENT error to the caller as an InvalidArgumentException.
   */
  @Test
  public void setDefaultBranchExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
      client.setDefaultBranch(catalog);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * Verifies that setDefaultBranch(String) completes successfully against the mocked
   * Empty response, issues exactly one HTTP request, and sends the expected
   * x-goog-api-client header.
   */
  @Test
  public void setDefaultBranchTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockService.addResponse(expectedResponse);
    String catalog = "projects/project-6372/locations/location-6372/catalogs/catalog-6372";
    client.setDefaultBranch(catalog);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that setDefaultBranch(String) surfaces a server-side INVALID_ARGUMENT
   * error to the caller as an InvalidArgumentException.
   */
  @Test
  public void setDefaultBranchExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String catalog = "projects/project-6372/locations/location-6372/catalogs/catalog-6372";
      client.setDefaultBranch(catalog);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * Verifies that getDefaultBranch(CatalogName) returns the mocked
   * GetDefaultBranchResponse, issues exactly one HTTP request, and sends the expected
   * x-goog-api-client header.
   */
  @Test
  public void getDefaultBranchTest() throws Exception {
    GetDefaultBranchResponse expectedResponse =
        GetDefaultBranchResponse.newBuilder()
            .setBranch(BranchName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[BRANCH]").toString())
            .setSetTime(Timestamp.newBuilder().build())
            .setNote("note3387378")
            .build();
    mockService.addResponse(expectedResponse);
    CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
    GetDefaultBranchResponse actualResponse = client.getDefaultBranch(catalog);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that getDefaultBranch(CatalogName) surfaces a server-side
   * INVALID_ARGUMENT error to the caller as an InvalidArgumentException.
   */
  @Test
  public void getDefaultBranchExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
      client.getDefaultBranch(catalog);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * Verifies that getDefaultBranch(String) returns the mocked GetDefaultBranchResponse,
   * issues exactly one HTTP request, and sends the expected x-goog-api-client header.
   */
  @Test
  public void getDefaultBranchTest2() throws Exception {
    GetDefaultBranchResponse expectedResponse =
        GetDefaultBranchResponse.newBuilder()
            .setBranch(BranchName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[BRANCH]").toString())
            .setSetTime(Timestamp.newBuilder().build())
            .setNote("note3387378")
            .build();
    mockService.addResponse(expectedResponse);
    String catalog = "projects/project-6372/locations/location-6372/catalogs/catalog-6372";
    GetDefaultBranchResponse actualResponse = client.getDefaultBranch(catalog);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that getDefaultBranch(String) surfaces a server-side INVALID_ARGUMENT
   * error to the caller as an InvalidArgumentException.
   */
  @Test
  public void getDefaultBranchExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String catalog = "projects/project-6372/locations/location-6372/catalogs/catalog-6372";
      client.getDefaultBranch(catalog);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * Verifies that getCompletionConfig(CompletionConfigName) returns the mocked
   * CompletionConfig, issues exactly one HTTP request, and sends the expected
   * x-goog-api-client header.
   */
  @Test
  public void getCompletionConfigTest() throws Exception {
    CompletionConfig expectedResponse =
        CompletionConfig.newBuilder()
            .setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .setMatchingOrder("matchingOrder-1366761135")
            .setMaxSuggestions(618824852)
            .setMinPrefixLength(96853510)
            .setAutoLearning(true)
            .setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build())
            .setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751")
            .setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build())
            .setLastDenylistImportOperation("lastDenylistImportOperation1262341570")
            .setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build())
            .setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689")
            .build();
    mockService.addResponse(expectedResponse);
    CompletionConfigName name = CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
    CompletionConfig actualResponse = client.getCompletionConfig(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that getCompletionConfig(CompletionConfigName) surfaces a server-side
   * INVALID_ARGUMENT error to the caller as an InvalidArgumentException.
   */
  @Test
  public void getCompletionConfigExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      CompletionConfigName name = CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
      client.getCompletionConfig(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * Verifies that getCompletionConfig(String) returns the mocked CompletionConfig,
   * issues exactly one HTTP request, and sends the expected x-goog-api-client header.
   */
  @Test
  public void getCompletionConfigTest2() throws Exception {
    CompletionConfig expectedResponse =
        CompletionConfig.newBuilder()
            .setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .setMatchingOrder("matchingOrder-1366761135")
            .setMaxSuggestions(618824852)
            .setMinPrefixLength(96853510)
            .setAutoLearning(true)
            .setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build())
            .setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751")
            .setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build())
            .setLastDenylistImportOperation("lastDenylistImportOperation1262341570")
            .setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build())
            .setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689")
            .build();
    mockService.addResponse(expectedResponse);
    String name =
        "projects/project-6627/locations/location-6627/catalogs/catalog-6627/completionConfig";
    CompletionConfig actualResponse = client.getCompletionConfig(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that getCompletionConfig(String) surfaces a server-side INVALID_ARGUMENT
   * error to the caller as an InvalidArgumentException.
   */
  @Test
  public void getCompletionConfigExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String name =
          "projects/project-6627/locations/location-6627/catalogs/catalog-6627/completionConfig";
      client.getCompletionConfig(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * Verifies that updateCompletionConfig(CompletionConfig, FieldMask) returns the
   * mocked CompletionConfig, issues exactly one HTTP request, and sends the expected
   * x-goog-api-client header.
   */
  @Test
  public void updateCompletionConfigTest() throws Exception {
    CompletionConfig expectedResponse =
        CompletionConfig.newBuilder()
            .setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .setMatchingOrder("matchingOrder-1366761135")
            .setMaxSuggestions(618824852)
            .setMinPrefixLength(96853510)
            .setAutoLearning(true)
            .setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build())
            .setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751")
            .setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build())
            .setLastDenylistImportOperation("lastDenylistImportOperation1262341570")
            .setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build())
            .setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689")
            .build();
    mockService.addResponse(expectedResponse);
    CompletionConfig completionConfig =
        CompletionConfig.newBuilder()
            .setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .setMatchingOrder("matchingOrder-1366761135")
            .setMaxSuggestions(618824852)
            .setMinPrefixLength(96853510)
            .setAutoLearning(true)
            .setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build())
            .setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751")
            .setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build())
            .setLastDenylistImportOperation("lastDenylistImportOperation1262341570")
            .setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build())
            .setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689")
            .build();
    FieldMask updateMask = FieldMask.newBuilder().build();
    CompletionConfig actualResponse = client.updateCompletionConfig(completionConfig, updateMask);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that updateCompletionConfig(CompletionConfig, FieldMask) surfaces a
   * server-side INVALID_ARGUMENT error to the caller as an InvalidArgumentException.
   */
  @Test
  public void updateCompletionConfigExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      CompletionConfig completionConfig =
          CompletionConfig.newBuilder()
              .setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
              .setMatchingOrder("matchingOrder-1366761135")
              .setMaxSuggestions(618824852)
              .setMinPrefixLength(96853510)
              .setAutoLearning(true)
              .setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build())
              .setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751")
              .setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build())
              .setLastDenylistImportOperation("lastDenylistImportOperation1262341570")
              .setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build())
              .setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689")
              .build();
      FieldMask updateMask = FieldMask.newBuilder().build();
      client.updateCompletionConfig(completionConfig, updateMask);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * Verifies that getAttributesConfig(AttributesConfigName) returns the mocked
   * AttributesConfig, issues exactly one HTTP request, and sends the expected
   * x-goog-api-client header.
   */
  @Test
  public void getAttributesConfigTest() throws Exception {
    AttributesConfig expectedResponse =
        AttributesConfig.newBuilder()
            .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
            .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
            .build();
    mockService.addResponse(expectedResponse);
    AttributesConfigName name = AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
    AttributesConfig actualResponse = client.getAttributesConfig(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that getAttributesConfig(AttributesConfigName) surfaces a server-side
   * INVALID_ARGUMENT error to the caller as an InvalidArgumentException.
   */
  @Test
  public void getAttributesConfigExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      AttributesConfigName name = AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
      client.getAttributesConfig(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * Verifies that getAttributesConfig(String) returns the mocked AttributesConfig,
   * issues exactly one HTTP request, and sends the expected x-goog-api-client header.
   */
  @Test
  public void getAttributesConfigTest2() throws Exception {
    AttributesConfig expectedResponse =
        AttributesConfig.newBuilder()
            .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
            .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
            .build();
    mockService.addResponse(expectedResponse);
    String name =
        "projects/project-9790/locations/location-9790/catalogs/catalog-9790/attributesConfig";
    AttributesConfig actualResponse = client.getAttributesConfig(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that getAttributesConfig(String) surfaces a server-side INVALID_ARGUMENT
   * error to the caller as an InvalidArgumentException.
   */
  @Test
  public void getAttributesConfigExceptionTest2() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String name =
          "projects/project-9790/locations/location-9790/catalogs/catalog-9790/attributesConfig";
      client.getAttributesConfig(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * Verifies that updateAttributesConfig(AttributesConfig, FieldMask) returns the
   * mocked AttributesConfig, issues exactly one HTTP request, and sends the expected
   * x-goog-api-client header.
   */
  @Test
  public void updateAttributesConfigTest() throws Exception {
    AttributesConfig expectedResponse =
        AttributesConfig.newBuilder()
            .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
            .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
            .build();
    mockService.addResponse(expectedResponse);
    AttributesConfig attributesConfig =
        AttributesConfig.newBuilder()
            .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
            .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
            .build();
    FieldMask updateMask = FieldMask.newBuilder().build();
    AttributesConfig actualResponse = client.updateAttributesConfig(attributesConfig, updateMask);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that updateAttributesConfig(AttributesConfig, FieldMask) surfaces a
   * server-side INVALID_ARGUMENT error to the caller as an InvalidArgumentException.
   */
  @Test
  public void updateAttributesConfigExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      AttributesConfig attributesConfig =
          AttributesConfig.newBuilder()
              .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
              .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
              .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
              .build();
      FieldMask updateMask = FieldMask.newBuilder().build();
      client.updateAttributesConfig(attributesConfig, updateMask);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * Verifies that addCatalogAttribute(AddCatalogAttributeRequest) returns the mocked
   * AttributesConfig, issues exactly one HTTP request, and sends the expected
   * x-goog-api-client header.
   */
  @Test
  public void addCatalogAttributeTest() throws Exception {
    AttributesConfig expectedResponse =
        AttributesConfig.newBuilder()
            .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
            .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
            .build();
    mockService.addResponse(expectedResponse);
    AddCatalogAttributeRequest request =
        AddCatalogAttributeRequest.newBuilder()
            .setAttributesConfig(
                AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .setCatalogAttribute(CatalogAttribute.newBuilder().build())
            .build();
    AttributesConfig actualResponse = client.addCatalogAttribute(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that addCatalogAttribute(AddCatalogAttributeRequest) surfaces a
   * server-side INVALID_ARGUMENT error to the caller as an InvalidArgumentException.
   */
  @Test
  public void addCatalogAttributeExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      AddCatalogAttributeRequest request =
          AddCatalogAttributeRequest.newBuilder()
              .setAttributesConfig(
                  AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
              .setCatalogAttribute(CatalogAttribute.newBuilder().build())
              .build();
      client.addCatalogAttribute(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * Verifies that removeCatalogAttribute(RemoveCatalogAttributeRequest) returns the
   * mocked AttributesConfig, issues exactly one HTTP request, and sends the expected
   * x-goog-api-client header.
   */
  @Test
  public void removeCatalogAttributeTest() throws Exception {
    AttributesConfig expectedResponse =
        AttributesConfig.newBuilder()
            .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
            .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
            .build();
    mockService.addResponse(expectedResponse);
    RemoveCatalogAttributeRequest request =
        RemoveCatalogAttributeRequest.newBuilder()
            .setAttributesConfig(
                AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .setKey("key106079")
            .build();
    AttributesConfig actualResponse = client.removeCatalogAttribute(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that removeCatalogAttribute(RemoveCatalogAttributeRequest) surfaces a
   * server-side INVALID_ARGUMENT error to the caller as an InvalidArgumentException.
   */
  @Test
  public void removeCatalogAttributeExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      RemoveCatalogAttributeRequest request =
          RemoveCatalogAttributeRequest.newBuilder()
              .setAttributesConfig(
                  AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
              .setKey("key106079")
              .build();
      client.removeCatalogAttribute(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * Verifies that batchRemoveCatalogAttributes(BatchRemoveCatalogAttributesRequest)
   * returns the mocked response, issues exactly one HTTP request, and sends the
   * expected x-goog-api-client header.
   */
  @Test
  public void batchRemoveCatalogAttributesTest() throws Exception {
    BatchRemoveCatalogAttributesResponse expectedResponse =
        BatchRemoveCatalogAttributesResponse.newBuilder()
            .addAllDeletedCatalogAttributes(new ArrayList<String>())
            .addAllResetCatalogAttributes(new ArrayList<String>())
            .build();
    mockService.addResponse(expectedResponse);
    BatchRemoveCatalogAttributesRequest request =
        BatchRemoveCatalogAttributesRequest.newBuilder()
            .setAttributesConfig(
                AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .addAllAttributeKeys(new ArrayList<String>())
            .build();
    BatchRemoveCatalogAttributesResponse actualResponse =
        client.batchRemoveCatalogAttributes(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that batchRemoveCatalogAttributes(BatchRemoveCatalogAttributesRequest)
   * surfaces a server-side INVALID_ARGUMENT error to the caller as an
   * InvalidArgumentException.
   */
  @Test
  public void batchRemoveCatalogAttributesExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      BatchRemoveCatalogAttributesRequest request =
          BatchRemoveCatalogAttributesRequest.newBuilder()
              .setAttributesConfig(
                  AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
              .addAllAttributeKeys(new ArrayList<String>())
              .build();
      client.batchRemoveCatalogAttributes(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * Verifies that replaceCatalogAttribute(ReplaceCatalogAttributeRequest) returns the
   * mocked AttributesConfig, issues exactly one HTTP request, and sends the expected
   * x-goog-api-client header.
   */
  @Test
  public void replaceCatalogAttributeTest() throws Exception {
    AttributesConfig expectedResponse =
        AttributesConfig.newBuilder()
            .setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
            .setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
            .build();
    mockService.addResponse(expectedResponse);
    ReplaceCatalogAttributeRequest request =
        ReplaceCatalogAttributeRequest.newBuilder()
            .setAttributesConfig(
                AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
            .setCatalogAttribute(CatalogAttribute.newBuilder().build())
            .setUpdateMask(FieldMask.newBuilder().build())
            .build();
    AttributesConfig actualResponse = client.replaceCatalogAttribute(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    // The x-goog-api-client header must match the standard GAPIC header pattern.
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  /**
   * Verifies that replaceCatalogAttribute(ReplaceCatalogAttributeRequest) surfaces a
   * server-side INVALID_ARGUMENT error to the caller as an InvalidArgumentException.
   */
  @Test
  public void replaceCatalogAttributeExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      ReplaceCatalogAttributeRequest request =
          ReplaceCatalogAttributeRequest.newBuilder()
              .setAttributesConfig(
                  AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
              .setCatalogAttribute(CatalogAttribute.newBuilder().build())
              .setUpdateMask(FieldMask.newBuilder().build())
              .build();
      client.replaceCatalogAttribute(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_SIZE_KEY;
import static org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.DEFAULT_ERROR_TOLERATION_DURATION;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.BlockCacheFactory;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.CacheTestUtils;
import org.apache.hadoop.hbase.io.hfile.HFileBlock;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class is used to test the functionality of the DataTieringManager.
*
* The mock online regions are stored in {@link TestDataTieringManager#testOnlineRegions}.
* For all tests, the setup of {@link TestDataTieringManager#testOnlineRegions} occurs only once.
* Please refer to {@link TestDataTieringManager#setupOnlineRegions()} for the structure.
* Additionally, a list of all store files is maintained in {@link TestDataTieringManager#hStoreFiles}.
* The characteristics of these store files are listed below:
* @formatter:off ## HStoreFile Information
*
* | HStoreFile | Region | Store | DataTiering | isHot |
* |------------------|--------------------|---------------------|-----------------------|-------|
* | hStoreFile0 | region1 | hStore11 | TIME_RANGE | true |
* | hStoreFile1 | region1 | hStore12 | NONE | true |
* | hStoreFile2 | region2 | hStore21 | TIME_RANGE | true |
* | hStoreFile3 | region2 | hStore22 | TIME_RANGE | false |
* @formatter:on
*/
@Category({ RegionServerTests.class, SmallTests.class })
public class TestDataTieringManager {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestDataTieringManager.class);
  private static final Logger LOG = LoggerFactory.getLogger(TestDataTieringManager.class);
  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
  // One day in milliseconds; 86,400,000 fits in an int, so int arithmetic here is safe.
  private static final long DAY = 24 * 60 * 60 * 1000;
  // Shared test fixtures, populated once per class (see setupBeforeClass / setupOnlineRegions).
  private static Configuration defaultConf;
  private static FileSystem fs;
  private static BlockCache blockCache;
  private static CacheConfig cacheConf;
  private static Path testDir;
  // Mock region map handed to DataTieringManager.instantiate(); keyed by encoded region name.
  private static final Map<String, HRegion> testOnlineRegions = new HashMap<>();
  private static DataTieringManager dataTieringManager;
  // All store files created for the tests; see the class Javadoc table for their tiering traits.
  private static final List<HStoreFile> hStoreFiles = new ArrayList<>();
  /**
   * Represents the current lexicographically increasing string used as a row key when writing
   * HFiles. It is incremented each time {@link #nextString()} is called to generate unique row
   * keys.
   */
  private static String rowKeyString;
@BeforeClass
public static void setupBeforeClass() throws Exception {
testDir = TEST_UTIL.getDataTestDir(TestDataTieringManager.class.getSimpleName());
defaultConf = TEST_UTIL.getConfiguration();
updateCommonConfigurations();
assertTrue(DataTieringManager.instantiate(defaultConf, testOnlineRegions));
dataTieringManager = DataTieringManager.getInstance();
rowKeyString = "";
}
private static void updateCommonConfigurations() {
defaultConf.setBoolean(DataTieringManager.GLOBAL_DATA_TIERING_ENABLED_KEY, true);
defaultConf.setStrings(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");
defaultConf.setLong(BUCKET_CACHE_SIZE_KEY, 32);
}
@FunctionalInterface
interface DataTieringMethodCallerWithPath {
boolean call(DataTieringManager manager, Path path) throws DataTieringException;
}
@FunctionalInterface
interface DataTieringMethodCallerWithKey {
boolean call(DataTieringManager manager, BlockCacheKey key) throws DataTieringException;
}
@Test
public void testDataTieringEnabledWithKey() throws IOException {
initializeTestEnvironment();
DataTieringMethodCallerWithKey methodCallerWithKey = DataTieringManager::isDataTieringEnabled;
// Test with valid key
BlockCacheKey key = new BlockCacheKey(hStoreFiles.get(0).getPath(), 0, true, BlockType.DATA);
testDataTieringMethodWithKeyNoException(methodCallerWithKey, key, true);
// Test with another valid key
key = new BlockCacheKey(hStoreFiles.get(1).getPath(), 0, true, BlockType.DATA);
testDataTieringMethodWithKeyNoException(methodCallerWithKey, key, false);
// Test with valid key with no HFile Path
key = new BlockCacheKey(hStoreFiles.get(0).getPath().getName(), 0);
testDataTieringMethodWithKeyExpectingException(methodCallerWithKey, key,
new DataTieringException("BlockCacheKey Doesn't Contain HFile Path"));
}
@Test
public void testDataTieringEnabledWithPath() throws IOException {
initializeTestEnvironment();
DataTieringMethodCallerWithPath methodCallerWithPath = DataTieringManager::isDataTieringEnabled;
// Test with valid path
Path hFilePath = hStoreFiles.get(1).getPath();
testDataTieringMethodWithPathNoException(methodCallerWithPath, hFilePath, false);
// Test with another valid path
hFilePath = hStoreFiles.get(3).getPath();
testDataTieringMethodWithPathNoException(methodCallerWithPath, hFilePath, true);
// Test with an incorrect path
hFilePath = new Path("incorrectPath");
testDataTieringMethodWithPathExpectingException(methodCallerWithPath, hFilePath,
new DataTieringException("Incorrect HFile Path: " + hFilePath));
// Test with a non-existing HRegion path
Path basePath = hStoreFiles.get(0).getPath().getParent().getParent().getParent();
hFilePath = new Path(basePath, "incorrectRegion/cf1/filename");
testDataTieringMethodWithPathExpectingException(methodCallerWithPath, hFilePath,
new DataTieringException("HRegion corresponding to " + hFilePath + " doesn't exist"));
// Test with a non-existing HStore path
basePath = hStoreFiles.get(0).getPath().getParent().getParent();
hFilePath = new Path(basePath, "incorrectCf/filename");
testDataTieringMethodWithPathExpectingException(methodCallerWithPath, hFilePath,
new DataTieringException("HStore corresponding to " + hFilePath + " doesn't exist"));
}
@Test
public void testHotDataWithKey() throws IOException {
initializeTestEnvironment();
DataTieringMethodCallerWithKey methodCallerWithKey = DataTieringManager::isHotData;
// Test with valid key
BlockCacheKey key = new BlockCacheKey(hStoreFiles.get(0).getPath(), 0, true, BlockType.DATA);
testDataTieringMethodWithKeyNoException(methodCallerWithKey, key, true);
// Test with another valid key
key = new BlockCacheKey(hStoreFiles.get(3).getPath(), 0, true, BlockType.DATA);
testDataTieringMethodWithKeyNoException(methodCallerWithKey, key, false);
}
@Test
public void testHotDataWithPath() throws IOException {
initializeTestEnvironment();
DataTieringMethodCallerWithPath methodCallerWithPath = DataTieringManager::isHotData;
// Test with valid path
Path hFilePath = hStoreFiles.get(2).getPath();
testDataTieringMethodWithPathNoException(methodCallerWithPath, hFilePath, true);
// Test with another valid path
hFilePath = hStoreFiles.get(3).getPath();
testDataTieringMethodWithPathNoException(methodCallerWithPath, hFilePath, false);
// Test with a filename where corresponding HStoreFile in not present
hFilePath = new Path(hStoreFiles.get(0).getPath().getParent(), "incorrectFileName");
testDataTieringMethodWithPathExpectingException(methodCallerWithPath, hFilePath,
new DataTieringException("Store file corresponding to " + hFilePath + " doesn't exist"));
}
@Test
public void testPrefetchWhenDataTieringEnabled() throws IOException {
setPrefetchBlocksOnOpen();
initializeTestEnvironment();
// Evict blocks from cache by closing the files and passing evict on close.
// Then initialize the reader again. Since Prefetch on open is set to true, it should prefetch
// those blocks.
for (HStoreFile file : hStoreFiles) {
file.closeStoreFile(true);
file.initReader();
}
// Since we have one cold file among four files, only three should get prefetched.
Optional<Map<String, Pair<String, Long>>> fullyCachedFiles = blockCache.getFullyCachedFiles();
assertTrue("We should get the fully cached files from the cache", fullyCachedFiles.isPresent());
Waiter.waitFor(defaultConf, 10000, () -> fullyCachedFiles.get().size() == 3);
assertEquals("Number of fully cached files are incorrect", 3, fullyCachedFiles.get().size());
}
private void setPrefetchBlocksOnOpen() {
defaultConf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);
}
@Test
public void testColdDataFiles() throws IOException {
initializeTestEnvironment();
Set<BlockCacheKey> allCachedBlocks = new HashSet<>();
for (HStoreFile file : hStoreFiles) {
allCachedBlocks.add(new BlockCacheKey(file.getPath(), 0, true, BlockType.DATA));
}
// Verify hStoreFile3 is identified as cold data
DataTieringMethodCallerWithPath methodCallerWithPath = DataTieringManager::isHotData;
Path hFilePath = hStoreFiles.get(3).getPath();
testDataTieringMethodWithPathNoException(methodCallerWithPath, hFilePath, false);
// Verify all the other files in hStoreFiles are hot data
for (int i = 0; i < hStoreFiles.size() - 1; i++) {
hFilePath = hStoreFiles.get(i).getPath();
testDataTieringMethodWithPathNoException(methodCallerWithPath, hFilePath, true);
}
try {
Set<String> coldFilePaths = dataTieringManager.getColdDataFiles(allCachedBlocks);
assertEquals(1, coldFilePaths.size());
} catch (DataTieringException e) {
fail("Unexpected DataTieringException: " + e.getMessage());
}
}
@Test
public void testCacheCompactedBlocksOnWriteDataTieringDisabled() throws IOException {
setCacheCompactBlocksOnWrite();
initializeTestEnvironment();
HRegion region = createHRegion("table3");
testCacheCompactedBlocksOnWrite(region, true);
}
@Test
public void testCacheCompactedBlocksOnWriteWithHotData() throws IOException {
setCacheCompactBlocksOnWrite();
initializeTestEnvironment();
HRegion region = createHRegion("table3", getConfWithTimeRangeDataTieringEnabled(5 * DAY));
testCacheCompactedBlocksOnWrite(region, true);
}
@Test
public void testCacheCompactedBlocksOnWriteWithColdData() throws IOException {
setCacheCompactBlocksOnWrite();
initializeTestEnvironment();
HRegion region = createHRegion("table3", getConfWithTimeRangeDataTieringEnabled(DAY));
testCacheCompactedBlocksOnWrite(region, false);
}
private void setCacheCompactBlocksOnWrite() {
defaultConf.setBoolean(CacheConfig.CACHE_COMPACTED_BLOCKS_ON_WRITE_KEY, true);
}
private void testCacheCompactedBlocksOnWrite(HRegion region, boolean expectDataBlocksCached)
throws IOException {
HStore hStore = createHStore(region, "cf1");
createTestFilesForCompaction(hStore);
hStore.refreshStoreFiles();
region.stores.put(Bytes.toBytes("cf1"), hStore);
testOnlineRegions.put(region.getRegionInfo().getEncodedName(), region);
long initialStoreFilesCount = hStore.getStorefilesCount();
long initialCacheDataBlockCount = blockCache.getDataBlockCount();
assertEquals(3, initialStoreFilesCount);
assertEquals(0, initialCacheDataBlockCount);
region.compact(true);
long compactedStoreFilesCount = hStore.getStorefilesCount();
long compactedCacheDataBlockCount = blockCache.getDataBlockCount();
assertEquals(1, compactedStoreFilesCount);
assertEquals(expectDataBlocksCached, compactedCacheDataBlockCount > 0);
}
private void createTestFilesForCompaction(HStore hStore) throws IOException {
long currentTime = System.currentTimeMillis();
Path storeDir = hStore.getStoreContext().getFamilyStoreDirectoryPath();
Configuration configuration = hStore.getReadOnlyConfiguration();
createHStoreFile(storeDir, configuration, currentTime - 2 * DAY,
hStore.getHRegion().getRegionFileSystem());
createHStoreFile(storeDir, configuration, currentTime - 3 * DAY,
hStore.getHRegion().getRegionFileSystem());
createHStoreFile(storeDir, configuration, currentTime - 4 * DAY,
hStore.getHRegion().getRegionFileSystem());
}
@Test
public void testPickColdDataFiles() throws IOException {
initializeTestEnvironment();
Map<String, String> coldDataFiles = dataTieringManager.getColdFilesList();
assertEquals(1, coldDataFiles.size());
// hStoreFiles[3] is the cold file.
assert (coldDataFiles.containsKey(hStoreFiles.get(3).getFileInfo().getActiveFileName()));
}
/*
* Verify that two cold blocks(both) are evicted when bucket reaches its capacity. The hot file
* remains in the cache.
*/
@Test
public void testBlockEvictions() throws Exception {
initializeTestEnvironment();
long capacitySize = 40 * 1024;
int writeThreads = 3;
int writerQLen = 64;
int[] bucketSizes = new int[] { 8 * 1024 + 1024 };
// Setup: Create a bucket cache with lower capacity
BucketCache bucketCache =
new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, 8192, bucketSizes,
writeThreads, writerQLen, null, DEFAULT_ERROR_TOLERATION_DURATION, defaultConf);
// Create three Cache keys with cold data files and a block with hot data.
// hStoreFiles.get(3) is a cold data file, while hStoreFiles.get(0) is a hot file.
Set<BlockCacheKey> cacheKeys = new HashSet<>();
cacheKeys.add(new BlockCacheKey(hStoreFiles.get(3).getPath(), 0, true, BlockType.DATA));
cacheKeys.add(new BlockCacheKey(hStoreFiles.get(3).getPath(), 8192, true, BlockType.DATA));
cacheKeys.add(new BlockCacheKey(hStoreFiles.get(0).getPath(), 0, true, BlockType.DATA));
// Create dummy data to be cached and fill the cache completely.
CacheTestUtils.HFileBlockPair[] blocks = CacheTestUtils.generateHFileBlocks(8192, 3);
int blocksIter = 0;
for (BlockCacheKey key : cacheKeys) {
bucketCache.cacheBlock(key, blocks[blocksIter++].getBlock());
// Ensure that the block is persisted to the file.
Waiter.waitFor(defaultConf, 10000, 100, () -> (bucketCache.getBackingMap().containsKey(key)));
}
// Verify that the bucket cache contains 3 blocks.
assertEquals(3, bucketCache.getBackingMap().keySet().size());
// Add an additional block into cache with hot data which should trigger the eviction
BlockCacheKey newKey = new BlockCacheKey(hStoreFiles.get(2).getPath(), 0, true, BlockType.DATA);
CacheTestUtils.HFileBlockPair[] newBlock = CacheTestUtils.generateHFileBlocks(8192, 1);
bucketCache.cacheBlock(newKey, newBlock[0].getBlock());
Waiter.waitFor(defaultConf, 10000, 100,
() -> (bucketCache.getBackingMap().containsKey(newKey)));
// Verify that the bucket cache now contains 2 hot blocks blocks only.
// Both cold blocks of 8KB will be evicted to make room for 1 block of 8KB + an additional
// space.
validateBlocks(bucketCache.getBackingMap().keySet(), 2, 2, 0);
}
/*
* Verify that two cold blocks(both) are evicted when bucket reaches its capacity, but one cold
* block remains in the cache since the required space is freed.
*/
@Test
public void testBlockEvictionsAllColdBlocks() throws Exception {
initializeTestEnvironment();
long capacitySize = 40 * 1024;
int writeThreads = 3;
int writerQLen = 64;
int[] bucketSizes = new int[] { 8 * 1024 + 1024 };
// Setup: Create a bucket cache with lower capacity
BucketCache bucketCache =
new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, 8192, bucketSizes,
writeThreads, writerQLen, null, DEFAULT_ERROR_TOLERATION_DURATION, defaultConf);
// Create three Cache keys with three cold data blocks.
// hStoreFiles.get(3) is a cold data file.
Set<BlockCacheKey> cacheKeys = new HashSet<>();
cacheKeys.add(new BlockCacheKey(hStoreFiles.get(3).getPath(), 0, true, BlockType.DATA));
cacheKeys.add(new BlockCacheKey(hStoreFiles.get(3).getPath(), 8192, true, BlockType.DATA));
cacheKeys.add(new BlockCacheKey(hStoreFiles.get(3).getPath(), 16384, true, BlockType.DATA));
// Create dummy data to be cached and fill the cache completely.
CacheTestUtils.HFileBlockPair[] blocks = CacheTestUtils.generateHFileBlocks(8192, 3);
int blocksIter = 0;
for (BlockCacheKey key : cacheKeys) {
bucketCache.cacheBlock(key, blocks[blocksIter++].getBlock());
// Ensure that the block is persisted to the file.
Waiter.waitFor(defaultConf, 10000, 100, () -> (bucketCache.getBackingMap().containsKey(key)));
}
// Verify that the bucket cache contains 3 blocks.
assertEquals(3, bucketCache.getBackingMap().keySet().size());
// Add an additional block into cache with hot data which should trigger the eviction
BlockCacheKey newKey = new BlockCacheKey(hStoreFiles.get(2).getPath(), 0, true, BlockType.DATA);
CacheTestUtils.HFileBlockPair[] newBlock = CacheTestUtils.generateHFileBlocks(8192, 1);
bucketCache.cacheBlock(newKey, newBlock[0].getBlock());
Waiter.waitFor(defaultConf, 10000, 100,
() -> (bucketCache.getBackingMap().containsKey(newKey)));
// Verify that the bucket cache now contains 1 cold block and a newly added hot block.
validateBlocks(bucketCache.getBackingMap().keySet(), 2, 1, 1);
}
/*
* Verify that a hot block evicted along with a cold block when bucket reaches its capacity.
*/
@Test
public void testBlockEvictionsHotBlocks() throws Exception {
initializeTestEnvironment();
long capacitySize = 40 * 1024;
int writeThreads = 3;
int writerQLen = 64;
int[] bucketSizes = new int[] { 8 * 1024 + 1024 };
// Setup: Create a bucket cache with lower capacity
BucketCache bucketCache =
new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, 8192, bucketSizes,
writeThreads, writerQLen, null, DEFAULT_ERROR_TOLERATION_DURATION, defaultConf);
// Create three Cache keys with two hot data blocks and one cold data block
// hStoreFiles.get(0) is a hot data file and hStoreFiles.get(3) is a cold data file.
Set<BlockCacheKey> cacheKeys = new HashSet<>();
cacheKeys.add(new BlockCacheKey(hStoreFiles.get(0).getPath(), 0, true, BlockType.DATA));
cacheKeys.add(new BlockCacheKey(hStoreFiles.get(0).getPath(), 8192, true, BlockType.DATA));
cacheKeys.add(new BlockCacheKey(hStoreFiles.get(3).getPath(), 0, true, BlockType.DATA));
// Create dummy data to be cached and fill the cache completely.
CacheTestUtils.HFileBlockPair[] blocks = CacheTestUtils.generateHFileBlocks(8192, 3);
int blocksIter = 0;
for (BlockCacheKey key : cacheKeys) {
bucketCache.cacheBlock(key, blocks[blocksIter++].getBlock());
// Ensure that the block is persisted to the file.
Waiter.waitFor(defaultConf, 10000, 100, () -> (bucketCache.getBackingMap().containsKey(key)));
}
// Verify that the bucket cache contains 3 blocks.
assertEquals(3, bucketCache.getBackingMap().keySet().size());
// Add an additional block which should evict the only cold block with an additional hot block.
BlockCacheKey newKey = new BlockCacheKey(hStoreFiles.get(2).getPath(), 0, true, BlockType.DATA);
CacheTestUtils.HFileBlockPair[] newBlock = CacheTestUtils.generateHFileBlocks(8192, 1);
bucketCache.cacheBlock(newKey, newBlock[0].getBlock());
Waiter.waitFor(defaultConf, 10000, 100,
() -> (bucketCache.getBackingMap().containsKey(newKey)));
// Verify that the bucket cache now contains 2 hot blocks.
// Only one of the older hot blocks is retained and other one is the newly added hot block.
validateBlocks(bucketCache.getBackingMap().keySet(), 2, 2, 0);
}
@Test
public void testFeatureKeyDisabled() throws Exception {
DataTieringManager.resetForTestingOnly();
defaultConf.setBoolean(DataTieringManager.GLOBAL_DATA_TIERING_ENABLED_KEY, false);
initializeTestEnvironment();
try {
assertFalse(DataTieringManager.instantiate(defaultConf, testOnlineRegions));
// Verify that the DataaTieringManager instance is not instantiated in the
// instantiate call above.
assertNull(DataTieringManager.getInstance());
// Also validate that data temperature is not honoured.
long capacitySize = 40 * 1024;
int writeThreads = 3;
int writerQLen = 64;
int[] bucketSizes = new int[] { 8 * 1024 + 1024 };
// Setup: Create a bucket cache with lower capacity
BucketCache bucketCache =
new BucketCache("file:" + testDir + "/bucket.cache", capacitySize, 8192, bucketSizes,
writeThreads, writerQLen, null, DEFAULT_ERROR_TOLERATION_DURATION, defaultConf);
// Create three Cache keys with two hot data blocks and one cold data block
// hStoreFiles.get(0) is a hot data file and hStoreFiles.get(3) is a cold data file.
List<BlockCacheKey> cacheKeys = new ArrayList<>();
cacheKeys.add(new BlockCacheKey(hStoreFiles.get(0).getPath(), 0, true, BlockType.DATA));
cacheKeys.add(new BlockCacheKey(hStoreFiles.get(0).getPath(), 8192, true, BlockType.DATA));
cacheKeys.add(new BlockCacheKey(hStoreFiles.get(3).getPath(), 0, true, BlockType.DATA));
// Create dummy data to be cached and fill the cache completely.
CacheTestUtils.HFileBlockPair[] blocks = CacheTestUtils.generateHFileBlocks(8192, 3);
int blocksIter = 0;
for (BlockCacheKey key : cacheKeys) {
LOG.info("Adding {}", key);
bucketCache.cacheBlock(key, blocks[blocksIter++].getBlock());
// Ensure that the block is persisted to the file.
Waiter.waitFor(defaultConf, 10000, 100,
() -> (bucketCache.getBackingMap().containsKey(key)));
}
// Verify that the bucket cache contains 3 blocks.
assertEquals(3, bucketCache.getBackingMap().keySet().size());
// Add an additional hot block, which triggers eviction.
BlockCacheKey newKey =
new BlockCacheKey(hStoreFiles.get(2).getPath(), 0, true, BlockType.DATA);
CacheTestUtils.HFileBlockPair[] newBlock = CacheTestUtils.generateHFileBlocks(8192, 1);
bucketCache.cacheBlock(newKey, newBlock[0].getBlock());
Waiter.waitFor(defaultConf, 10000, 100,
() -> (bucketCache.getBackingMap().containsKey(newKey)));
// Verify that the bucket still contains the only cold block and one newly added hot block.
// The older hot blocks are evicted and data-tiering mechanism does not kick in to evict
// the cold block.
validateBlocks(bucketCache.getBackingMap().keySet(), 2, 1, 1);
} finally {
DataTieringManager.resetForTestingOnly();
defaultConf.setBoolean(DataTieringManager.GLOBAL_DATA_TIERING_ENABLED_KEY, true);
assertTrue(DataTieringManager.instantiate(defaultConf, testOnlineRegions));
}
}
@Test
public void testCacheConfigShouldCacheFile() throws Exception {
// Evict the files from cache.
for (HStoreFile file : hStoreFiles) {
file.closeStoreFile(true);
}
// Verify that the API shouldCacheFileBlock returns the result correctly.
// hStoreFiles[0], hStoreFiles[1], hStoreFiles[2] are hot files.
// hStoreFiles[3] is a cold file.
try {
assertTrue(cacheConf.shouldCacheBlockOnRead(BlockCategory.DATA,
hStoreFiles.get(0).getFileInfo().getHFileInfo(),
hStoreFiles.get(0).getFileInfo().getConf()));
assertTrue(cacheConf.shouldCacheBlockOnRead(BlockCategory.DATA,
hStoreFiles.get(1).getFileInfo().getHFileInfo(),
hStoreFiles.get(1).getFileInfo().getConf()));
assertTrue(cacheConf.shouldCacheBlockOnRead(BlockCategory.DATA,
hStoreFiles.get(2).getFileInfo().getHFileInfo(),
hStoreFiles.get(2).getFileInfo().getConf()));
assertFalse(cacheConf.shouldCacheBlockOnRead(BlockCategory.DATA,
hStoreFiles.get(3).getFileInfo().getHFileInfo(),
hStoreFiles.get(3).getFileInfo().getConf()));
} finally {
for (HStoreFile file : hStoreFiles) {
file.initReader();
}
}
}
@Test
public void testCacheOnReadColdFile() throws Exception {
initializeTestEnvironment();
// hStoreFiles[3] is a cold file. the blocks should not get loaded after a readBlock call.
HStoreFile hStoreFile = hStoreFiles.get(3);
BlockCacheKey cacheKey = new BlockCacheKey(hStoreFile.getPath(), 0, true, BlockType.DATA);
testCacheOnRead(hStoreFile, cacheKey, -1, false);
}
@Test
public void testCacheOnReadHotFile() throws Exception {
initializeTestEnvironment();
// hStoreFiles[0] is a hot file. the blocks should get loaded after a readBlock call.
HStoreFile hStoreFile = hStoreFiles.get(0);
BlockCacheKey cacheKey =
new BlockCacheKey(hStoreFiles.get(0).getPath(), 0, true, BlockType.DATA);
testCacheOnRead(hStoreFile, cacheKey, -1, true);
}
private void testCacheOnRead(HStoreFile hStoreFile, BlockCacheKey key, long onDiskBlockSize,
boolean expectedCached) throws Exception {
// Execute the read block API which will try to cache the block if the block is a hot block.
hStoreFile.getReader().getHFileReader().readBlock(key.getOffset(), onDiskBlockSize, true, false,
false, false, key.getBlockType(), DataBlockEncoding.NONE);
// Validate that the hot block gets cached and cold block is not cached.
HFileBlock block = (HFileBlock) blockCache.getBlock(key, false, false, false, BlockType.DATA);
if (expectedCached) {
assertNotNull(block);
} else {
assertNull(block);
}
}
private void validateBlocks(Set<BlockCacheKey> keys, int expectedTotalKeys, int expectedHotBlocks,
int expectedColdBlocks) {
int numHotBlocks = 0, numColdBlocks = 0;
Waiter.waitFor(defaultConf, 10000, 100, () -> (expectedTotalKeys == keys.size()));
int iter = 0;
for (BlockCacheKey key : keys) {
try {
if (dataTieringManager.isHotData(key)) {
numHotBlocks++;
} else {
numColdBlocks++;
}
} catch (Exception e) {
fail("Unexpected exception!");
}
}
assertEquals(expectedHotBlocks, numHotBlocks);
assertEquals(expectedColdBlocks, numColdBlocks);
}
private void testDataTieringMethodWithPath(DataTieringMethodCallerWithPath caller, Path path,
boolean expectedResult, DataTieringException exception) {
try {
boolean value = caller.call(dataTieringManager, path);
if (exception != null) {
fail("Expected DataTieringException to be thrown");
}
assertEquals(expectedResult, value);
} catch (DataTieringException e) {
if (exception == null) {
fail("Unexpected DataTieringException: " + e.getMessage());
}
assertEquals(exception.getMessage(), e.getMessage());
}
}
private void testDataTieringMethodWithKey(DataTieringMethodCallerWithKey caller,
BlockCacheKey key, boolean expectedResult, DataTieringException exception) {
try {
boolean value = caller.call(dataTieringManager, key);
if (exception != null) {
fail("Expected DataTieringException to be thrown");
}
assertEquals(expectedResult, value);
} catch (DataTieringException e) {
if (exception == null) {
fail("Unexpected DataTieringException: " + e.getMessage());
}
assertEquals(exception.getMessage(), e.getMessage());
}
}
private void testDataTieringMethodWithPathExpectingException(
DataTieringMethodCallerWithPath caller, Path path, DataTieringException exception) {
testDataTieringMethodWithPath(caller, path, false, exception);
}
private void testDataTieringMethodWithPathNoException(DataTieringMethodCallerWithPath caller,
Path path, boolean expectedResult) {
testDataTieringMethodWithPath(caller, path, expectedResult, null);
}
private void testDataTieringMethodWithKeyExpectingException(DataTieringMethodCallerWithKey caller,
BlockCacheKey key, DataTieringException exception) {
testDataTieringMethodWithKey(caller, key, false, exception);
}
private void testDataTieringMethodWithKeyNoException(DataTieringMethodCallerWithKey caller,
BlockCacheKey key, boolean expectedResult) {
testDataTieringMethodWithKey(caller, key, expectedResult, null);
}
private static void initializeTestEnvironment() throws IOException {
setupFileSystemAndCache();
setupOnlineRegions();
}
private static void setupFileSystemAndCache() throws IOException {
fs = HFileSystem.get(defaultConf);
blockCache = BlockCacheFactory.createBlockCache(defaultConf);
cacheConf = new CacheConfig(defaultConf, blockCache);
}
private static void setupOnlineRegions() throws IOException {
testOnlineRegions.clear();
hStoreFiles.clear();
long day = 24 * 60 * 60 * 1000;
long currentTime = System.currentTimeMillis();
HRegion region1 = createHRegion("table1");
HStore hStore11 = createHStore(region1, "cf1", getConfWithTimeRangeDataTieringEnabled(day));
hStoreFiles.add(createHStoreFile(hStore11.getStoreContext().getFamilyStoreDirectoryPath(),
hStore11.getReadOnlyConfiguration(), currentTime, region1.getRegionFileSystem()));
hStore11.refreshStoreFiles();
HStore hStore12 = createHStore(region1, "cf2");
hStoreFiles.add(createHStoreFile(hStore12.getStoreContext().getFamilyStoreDirectoryPath(),
hStore12.getReadOnlyConfiguration(), currentTime - day, region1.getRegionFileSystem()));
hStore12.refreshStoreFiles();
region1.stores.put(Bytes.toBytes("cf1"), hStore11);
region1.stores.put(Bytes.toBytes("cf2"), hStore12);
HRegion region2 =
createHRegion("table2", getConfWithTimeRangeDataTieringEnabled((long) (2.5 * day)));
HStore hStore21 = createHStore(region2, "cf1");
hStoreFiles.add(createHStoreFile(hStore21.getStoreContext().getFamilyStoreDirectoryPath(),
hStore21.getReadOnlyConfiguration(), currentTime - 2 * day, region2.getRegionFileSystem()));
hStore21.refreshStoreFiles();
HStore hStore22 = createHStore(region2, "cf2");
hStoreFiles.add(createHStoreFile(hStore22.getStoreContext().getFamilyStoreDirectoryPath(),
hStore22.getReadOnlyConfiguration(), currentTime - 3 * day, region2.getRegionFileSystem()));
hStore22.refreshStoreFiles();
region2.stores.put(Bytes.toBytes("cf1"), hStore21);
region2.stores.put(Bytes.toBytes("cf2"), hStore22);
for (HStoreFile file : hStoreFiles) {
file.initReader();
}
testOnlineRegions.put(region1.getRegionInfo().getEncodedName(), region1);
testOnlineRegions.put(region2.getRegionInfo().getEncodedName(), region2);
}
private static HRegion createHRegion(String table) throws IOException {
return createHRegion(table, defaultConf);
}
private static HRegion createHRegion(String table, Configuration conf) throws IOException {
TableName tableName = TableName.valueOf(table);
TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName)
.setValue(DataTieringManager.DATATIERING_KEY, conf.get(DataTieringManager.DATATIERING_KEY))
.setValue(DataTieringManager.DATATIERING_HOT_DATA_AGE_KEY,
conf.get(DataTieringManager.DATATIERING_HOT_DATA_AGE_KEY))
.build();
RegionInfo hri = RegionInfoBuilder.newBuilder(tableName).build();
Configuration testConf = new Configuration(conf);
CommonFSUtils.setRootDir(testConf, testDir);
HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs,
CommonFSUtils.getTableDir(testDir, hri.getTable()), hri);
HRegion region = new HRegion(regionFs, null, conf, htd, null);
// Manually sets the BlockCache for the HRegion instance.
// This is necessary because the region server is not started within this method,
// and therefore the BlockCache needs to be explicitly configured.
region.setBlockCache(blockCache);
return region;
}
private static HStore createHStore(HRegion region, String columnFamily) throws IOException {
return createHStore(region, columnFamily, defaultConf);
}
private static HStore createHStore(HRegion region, String columnFamily, Configuration conf)
throws IOException {
ColumnFamilyDescriptor columnFamilyDescriptor =
ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(columnFamily))
.setValue(DataTieringManager.DATATIERING_KEY, conf.get(DataTieringManager.DATATIERING_KEY))
.setValue(DataTieringManager.DATATIERING_HOT_DATA_AGE_KEY,
conf.get(DataTieringManager.DATATIERING_HOT_DATA_AGE_KEY))
.build();
return new HStore(region, columnFamilyDescriptor, conf, false);
}
private static Configuration getConfWithTimeRangeDataTieringEnabled(long hotDataAge) {
Configuration conf = new Configuration(defaultConf);
conf.set(DataTieringManager.DATATIERING_KEY, DataTieringType.TIME_RANGE.name());
conf.set(DataTieringManager.DATATIERING_HOT_DATA_AGE_KEY, String.valueOf(hotDataAge));
return conf;
}
static HStoreFile createHStoreFile(Path storeDir, Configuration conf, long timestamp,
HRegionFileSystem regionFs) throws IOException {
String columnFamily = storeDir.getName();
StoreFileWriter storeFileWriter = new StoreFileWriter.Builder(conf, cacheConf, fs)
.withOutputDir(storeDir).withFileContext(new HFileContextBuilder().build()).build();
writeStoreFileRandomData(storeFileWriter, Bytes.toBytes(columnFamily), timestamp);
StoreContext storeContext = StoreContext.getBuilder().withRegionFileSystem(regionFs).build();
StoreFileTracker sft = StoreFileTrackerFactory.create(conf, true, storeContext);
return new HStoreFile(fs, storeFileWriter.getPath(), conf, cacheConf, BloomType.NONE, true,
sft);
}
/**
* Writes random data to a store file with rows arranged in lexicographically increasing order.
* Each row is generated using the {@link #nextString()} method, ensuring that each subsequent row
* is lexicographically larger than the previous one.
*/
private static void writeStoreFileRandomData(final StoreFileWriter writer, byte[] columnFamily,
long timestamp) throws IOException {
int cellsPerFile = 10;
byte[] qualifier = Bytes.toBytes("qualifier");
byte[] value = generateRandomBytes(4 * 1024);
try {
for (int i = 0; i < cellsPerFile; i++) {
byte[] row = Bytes.toBytes(nextString());
writer.append(new KeyValue(row, columnFamily, qualifier, timestamp, value));
}
} finally {
writer.appendTrackedTimestampsToMetadata();
writer.close();
}
}
private static byte[] generateRandomBytes(int sizeInBytes) {
Random random = new Random();
byte[] randomBytes = new byte[sizeInBytes];
random.nextBytes(randomBytes);
return randomBytes;
}
/**
* Returns the lexicographically larger string every time it's called.
*/
private static String nextString() {
if (rowKeyString == null || rowKeyString.isEmpty()) {
rowKeyString = "a";
}
char lastChar = rowKeyString.charAt(rowKeyString.length() - 1);
if (lastChar < 'z') {
rowKeyString = rowKeyString.substring(0, rowKeyString.length() - 1) + (char) (lastChar + 1);
} else {
rowKeyString = rowKeyString + "a";
}
return rowKeyString;
}
}
|
googleapis/google-cloud-java | 38,140 | java-securitycenter/proto-google-cloud-securitycenter-v1p1beta1/src/main/java/com/google/cloud/securitycenter/v1p1beta1/CreateFindingRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycenter/v1p1beta1/securitycenter_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycenter.v1p1beta1;
/**
*
*
* <pre>
* Request message for creating a finding.
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v1p1beta1.CreateFindingRequest}
*/
public final class CreateFindingRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1p1beta1.CreateFindingRequest)
CreateFindingRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CreateFindingRequest.newBuilder() to construct.
  private CreateFindingRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default constructor: initializes string fields to the proto3 default (empty string).
  private CreateFindingRequest() {
    parent_ = "";
    findingId_ = "";
  }

  // Called reflectively by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateFindingRequest();
  }

  // Message descriptor and field-accessor table, both owned by the generated
  // SecuritycenterService outer class for this proto file.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1p1beta1_CreateFindingRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1p1beta1_CreateFindingRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest.class,
            com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest.Builder.class);
  }

  // Presence bits; bit 0x00000001 tracks whether `finding` (field 3) is set.
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; decoded lazily to String on first access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Resource name of the new finding's parent. Its format should be
* "organizations/[organization_id]/sources/[source_id]".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the wire-format ByteString to UTF-8 and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. Resource name of the new finding's parent. Its format should be
* "organizations/[organization_id]/sources/[source_id]".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String back to a ByteString and cache that form instead.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FINDING_ID_FIELD_NUMBER = 2;

  // Holds either a String or a ByteString; decoded lazily to String on first access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object findingId_ = "";
/**
*
*
* <pre>
* Required. Unique identifier provided by the client within the parent scope.
* </pre>
*
* <code>string finding_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The findingId.
*/
  @java.lang.Override
  public java.lang.String getFindingId() {
    java.lang.Object ref = findingId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the wire-format ByteString to UTF-8 and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      findingId_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. Unique identifier provided by the client within the parent scope.
* </pre>
*
* <code>string finding_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for findingId.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getFindingIdBytes() {
    java.lang.Object ref = findingId_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String back to a ByteString and cache that form instead.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      findingId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FINDING_FIELD_NUMBER = 3;

  // Message field; null until set, presence tracked via bitField0_ bit 0x00000001.
  private com.google.cloud.securitycenter.v1p1beta1.Finding finding_;
/**
*
*
* <pre>
* Required. The Finding being created. The name and security_marks will be ignored as
* they are both output only fields on this resource.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1p1beta1.Finding finding = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the finding field is set.
*/
  @java.lang.Override
  public boolean hasFinding() {
    // Explicit presence bit, so an empty-but-set Finding is distinguishable from unset.
    return ((bitField0_ & 0x00000001) != 0);
  }
/**
*
*
* <pre>
* Required. The Finding being created. The name and security_marks will be ignored as
* they are both output only fields on this resource.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1p1beta1.Finding finding = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The finding.
*/
  @java.lang.Override
  public com.google.cloud.securitycenter.v1p1beta1.Finding getFinding() {
    // Never returns null: falls back to the default instance when unset.
    return finding_ == null
        ? com.google.cloud.securitycenter.v1p1beta1.Finding.getDefaultInstance()
        : finding_;
  }
/**
*
*
* <pre>
* Required. The Finding being created. The name and security_marks will be ignored as
* they are both output only fields on this resource.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1p1beta1.Finding finding = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
  @java.lang.Override
  public com.google.cloud.securitycenter.v1p1beta1.FindingOrBuilder getFindingOrBuilder() {
    // Never returns null: falls back to the default instance when unset.
    return finding_ == null
        ? com.google.cloud.securitycenter.v1p1beta1.Finding.getDefaultInstance()
        : finding_;
  }
  // Memoized initialization state: -1 = unknown, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required (proto2-style) fields, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes set fields in field-number order; empty strings are omitted per proto3.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(findingId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, findingId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getFinding());
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes) the serialized byte size; must mirror writeTo exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(findingId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, findingId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getFinding());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Field-by-field structural equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest other =
        (com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getFindingId().equals(other.getFindingId())) return false;
    if (hasFinding() != other.hasFinding()) return false;
    if (hasFinding()) {
      if (!getFinding().equals(other.getFinding())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals; mixes field numbers with field values.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + FINDING_ID_FIELD_NUMBER;
    hash = (53 * hash) + getFindingId().hashCode();
    if (hasFinding()) {
      hash = (37 * hash) + FINDING_FIELD_NUMBER;
      hash = (53 * hash) + getFinding().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER / the runtime helpers.
  public static com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Builder factories: new builders start from the (immutable) default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a useless mergeFrom when this is the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for creating a finding.
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v1p1beta1.CreateFindingRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1p1beta1.CreateFindingRequest)
com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
          .internal_static_google_cloud_securitycenter_v1p1beta1_CreateFindingRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
          .internal_static_google_cloud_securitycenter_v1p1beta1_CreateFindingRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest.class,
              com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest.Builder.class);
    }

    // Construct using com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested-message field builders when the runtime requires it
    // (i.e. when alwaysUseFieldBuilders is enabled for nested-builder support).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getFindingFieldBuilder();
      }
    }

    // Resets every field to its default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      findingId_ = "";
      finding_ = null;
      if (findingBuilder_ != null) {
        findingBuilder_.dispose();
        findingBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
          .internal_static_google_cloud_securitycenter_v1p1beta1_CreateFindingRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest
        getDefaultInstanceForType() {
      return com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest.getDefaultInstance();
    }

    // build() enforces isInitialized(); buildPartial() does not.
    @java.lang.Override
    public com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest build() {
      com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest buildPartial() {
      com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest result =
          new com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose builder presence bits are set; remaps the
    // builder's `finding` bit (0x04) to the message's bit (0x01).
    private void buildPartial0(
        com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.findingId_ = findingId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.finding_ = findingBuilder_ == null ? finding_ : findingBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Reflection-based field mutators simply delegate to the superclass.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dispatches to the typed mergeFrom when possible; otherwise falls back to
    // descriptor-based merging in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest) {
        return mergeFrom((com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: non-empty strings overwrite, the finding message is recursively merged.
    public Builder mergeFrom(com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest other) {
      if (other
          == com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getFindingId().isEmpty()) {
        findingId_ = other.findingId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasFinding()) {
        mergeFinding(other.getFinding());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields, so a builder is always buildable.
      return true;
    }

    // Wire-format parse loop; tags 10/18/26 are fields 1-3 with wire type 2
    // (length-delimited). Unknown fields are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                findingId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getFindingFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Builder presence bits: 0x01 parent, 0x02 findingId, 0x04 finding.
    private int bitField0_;

    // Holds either a String or a ByteString; decoded lazily, mirroring the message field.
    private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Resource name of the new finding's parent. Its format should be
* "organizations/[organization_id]/sources/[source_id]".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode the ByteString form and cache the String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Required. Resource name of the new finding's parent. Its format should be
* "organizations/[organization_id]/sources/[source_id]".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        // Encode the cached String to a ByteString and cache that form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* Required. Resource name of the new finding's parent. Its format should be
* "organizations/[organization_id]/sources/[source_id]".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      // Mark the field as set and notify parent builders of the change.
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Resource name of the new finding's parent. Its format should be
* "organizations/[organization_id]/sources/[source_id]".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
    public Builder clearParent() {
      // Restore the default value and drop the presence bit.
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Resource name of the new finding's parent. Its format should be
* "organizations/[organization_id]/sources/[source_id]".
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; verify before accepting the bytes.
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // Holds either a String or a ByteString; decoded lazily, mirroring the message field.
    private java.lang.Object findingId_ = "";
/**
*
*
* <pre>
* Required. Unique identifier provided by the client within the parent scope.
* </pre>
*
* <code>string finding_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The findingId.
*/
    public java.lang.String getFindingId() {
      java.lang.Object ref = findingId_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode the ByteString form and cache the String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        findingId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Required. Unique identifier provided by the client within the parent scope.
* </pre>
*
* <code>string finding_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for findingId.
*/
    public com.google.protobuf.ByteString getFindingIdBytes() {
      java.lang.Object ref = findingId_;
      if (ref instanceof String) {
        // Encode the cached String to a ByteString and cache that form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        findingId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* Required. Unique identifier provided by the client within the parent scope.
* </pre>
*
* <code>string finding_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The findingId to set.
* @return This builder for chaining.
*/
    public Builder setFindingId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      findingId_ = value;
      // Mark the field as set and notify parent builders of the change.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Unique identifier provided by the client within the parent scope.
* </pre>
*
* <code>string finding_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
    public Builder clearFindingId() {
      // Restore the default value and drop the presence bit.
      findingId_ = getDefaultInstance().getFindingId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Unique identifier provided by the client within the parent scope.
* </pre>
*
* <code>string finding_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for findingId to set.
* @return This builder for chaining.
*/
    public Builder setFindingIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; verify before accepting the bytes.
      checkByteStringIsUtf8(value);
      findingId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Either finding_ (plain value) or findingBuilder_ (nested builder) is active,
    // never both; getFindingFieldBuilder() switches from the former to the latter.
    private com.google.cloud.securitycenter.v1p1beta1.Finding finding_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.securitycenter.v1p1beta1.Finding,
            com.google.cloud.securitycenter.v1p1beta1.Finding.Builder,
            com.google.cloud.securitycenter.v1p1beta1.FindingOrBuilder>
        findingBuilder_;
/**
*
*
* <pre>
* Required. The Finding being created. The name and security_marks will be ignored as
* they are both output only fields on this resource.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1p1beta1.Finding finding = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the finding field is set.
*/
    public boolean hasFinding() {
      // Builder-side presence bit for the finding field.
      return ((bitField0_ & 0x00000004) != 0);
    }
/**
*
*
* <pre>
* Required. The Finding being created. The name and security_marks will be ignored as
* they are both output only fields on this resource.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1p1beta1.Finding finding = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The finding.
*/
    public com.google.cloud.securitycenter.v1p1beta1.Finding getFinding() {
      // Prefer the live nested builder when one exists; never returns null.
      if (findingBuilder_ == null) {
        return finding_ == null
            ? com.google.cloud.securitycenter.v1p1beta1.Finding.getDefaultInstance()
            : finding_;
      } else {
        return findingBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Required. The Finding being created. The name and security_marks will be ignored as
* they are both output only fields on this resource.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1p1beta1.Finding finding = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setFinding(com.google.cloud.securitycenter.v1p1beta1.Finding value) {
      if (findingBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        finding_ = value;
      } else {
        // Route through the nested builder so dependents stay in sync.
        findingBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The Finding being created. The name and security_marks will be ignored as
* they are both output only fields on this resource.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1p1beta1.Finding finding = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setFinding(
        com.google.cloud.securitycenter.v1p1beta1.Finding.Builder builderForValue) {
      // Convenience overload: builds the provided sub-builder and stores the result.
      if (findingBuilder_ == null) {
        finding_ = builderForValue.build();
      } else {
        findingBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The Finding being created. The name and security_marks will be ignored as
* they are both output only fields on this resource.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1p1beta1.Finding finding = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeFinding(com.google.cloud.securitycenter.v1p1beta1.Finding value) {
      if (findingBuilder_ == null) {
        // Merge into the existing value only when one is set and non-default;
        // otherwise simply replace.
        if (((bitField0_ & 0x00000004) != 0)
            && finding_ != null
            && finding_ != com.google.cloud.securitycenter.v1p1beta1.Finding.getDefaultInstance()) {
          getFindingBuilder().mergeFrom(value);
        } else {
          finding_ = value;
        }
      } else {
        findingBuilder_.mergeFrom(value);
      }
      if (finding_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The Finding being created. The name and security_marks will be ignored as
* they are both output only fields on this resource.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1p1beta1.Finding finding = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder clearFinding() {
      // Drop the presence bit, the cached value, and any live nested builder.
      bitField0_ = (bitField0_ & ~0x00000004);
      finding_ = null;
      if (findingBuilder_ != null) {
        findingBuilder_.dispose();
        findingBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The Finding being created. The name and security_marks will be ignored as
* they are both output only fields on this resource.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1p1beta1.Finding finding = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.securitycenter.v1p1beta1.Finding.Builder getFindingBuilder() {
      // Accessing the builder marks the field as set (the sub-message may be mutated).
      bitField0_ |= 0x00000004;
      onChanged();
      return getFindingFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The Finding being created. The name and security_marks will be ignored as
* they are both output only fields on this resource.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1p1beta1.Finding finding = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.securitycenter.v1p1beta1.FindingOrBuilder getFindingOrBuilder() {
      // Read-only view; prefers the live nested builder, never returns null.
      if (findingBuilder_ != null) {
        return findingBuilder_.getMessageOrBuilder();
      } else {
        return finding_ == null
            ? com.google.cloud.securitycenter.v1p1beta1.Finding.getDefaultInstance()
            : finding_;
      }
    }
/**
*
*
* <pre>
* Required. The Finding being created. The name and security_marks will be ignored as
* they are both output only fields on this resource.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v1p1beta1.Finding finding = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the nested-field builder, seeding it with the current value;
    // from then on findingBuilder_ owns the state and finding_ is nulled out.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.securitycenter.v1p1beta1.Finding,
            com.google.cloud.securitycenter.v1p1beta1.Finding.Builder,
            com.google.cloud.securitycenter.v1p1beta1.FindingOrBuilder>
        getFindingFieldBuilder() {
      if (findingBuilder_ == null) {
        findingBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.securitycenter.v1p1beta1.Finding,
                com.google.cloud.securitycenter.v1p1beta1.Finding.Builder,
                com.google.cloud.securitycenter.v1p1beta1.FindingOrBuilder>(
                getFinding(), getParentForChildren(), isClean());
        finding_ = null;
      }
      return findingBuilder_;
    }
    // Unknown-field handling delegates to the superclass; marked final so
    // subclasses of generated builders cannot alter the behavior.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1p1beta1.CreateFindingRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1p1beta1.CreateFindingRequest)
  // Singleton default instance shared by all callers; also the prototype for builders.
  private static final com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest();
  }

  public static com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser: builds via a fresh Builder, attaching the partially parsed
  // message to any InvalidProtocolBufferException so callers can inspect it.
  private static final com.google.protobuf.Parser<CreateFindingRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateFindingRequest>() {
        @java.lang.Override
        public CreateFindingRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateFindingRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateFindingRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.securitycenter.v1p1beta1.CreateFindingRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/cxf | 38,217 | maven-plugins/codegen-plugin/src/main/java/org/apache/cxf/maven_plugin/AbstractCodegenMojo.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.maven_plugin;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.Authenticator;
import java.net.PasswordAuthentication;
import java.net.URI;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

import org.apache.commons.lang3.SystemUtils;
import org.apache.cxf.Bus;
import org.apache.cxf.common.util.CollectionUtils;
import org.apache.cxf.common.util.SystemPropertyAction;
import org.apache.cxf.common.util.URIParserUtil;
import org.apache.cxf.helpers.CastUtils;
import org.apache.cxf.helpers.FileUtils;
import org.apache.cxf.helpers.JavaUtils;
import org.apache.maven.ProjectDependenciesResolver;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.resolver.AbstractArtifactResolutionException;
import org.apache.maven.artifact.resolver.ArtifactResolutionRequest;
import org.apache.maven.artifact.resolver.ArtifactResolutionResult;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.Resource;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import org.apache.maven.repository.RepositorySystem;
import org.apache.maven.settings.Proxy;
import org.apache.maven.toolchain.Toolchain;
import org.apache.maven.toolchain.ToolchainManager;
import org.codehaus.plexus.archiver.jar.JarArchiver;
import org.codehaus.plexus.archiver.jar.Manifest;
import org.codehaus.plexus.archiver.jar.Manifest.Attribute;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.cli.CommandLineException;
import org.codehaus.plexus.util.cli.CommandLineUtils;
import org.codehaus.plexus.util.cli.Commandline;
import org.codehaus.plexus.util.cli.StreamConsumer;
import org.sonatype.plexus.build.incremental.BuildContext;
/**
 * Base class for the CXF code-generation mojos (wsdl2java etc.).
 * Collects wsdl options from configuration, directory scans and dependency
 * scans, then runs the generator either in-process or in a forked JVM.
 */
public abstract class AbstractCodegenMojo extends AbstractMojo {
    /**
     * JVM/System property name holding the hostname of the http proxy.
     */
    private static final String HTTP_PROXY_HOST = "http.proxyHost";
    /**
     * JVM/System property name holding the port of the http proxy.
     */
    private static final String HTTP_PROXY_PORT = "http.proxyPort";
    /**
     * JVM/System property name holding the list of hosts/patterns that
     * should not use the proxy configuration.
     */
    private static final String HTTP_NON_PROXY_HOSTS = "http.nonProxyHosts";
    /**
     * JVM/System property name holding the username of the http proxy.
     */
    private static final String HTTP_PROXY_USER = "http.proxyUser";
    /**
     * JVM/System property name holding the password of the http proxy.
     */
    private static final String HTTP_PROXY_PASSWORD = "http.proxyPassword";
    /** The project's compiled-classes directory; created if missing before generation. */
    @Parameter(property = "project.build.outputDirectory", required = true)
    protected String classesDirectory;
    /**
     * By default all maven dependencies of type "wsdl" are added to the effective wsdlOptions. Setting this
     * parameter to true disables this functionality
     */
    @Parameter(property = "cxf.disableDependencyScan", defaultValue = "false")
    protected boolean disableDependencyScan;
    /**
     * Disables the scanning of the wsdlRoot/testWsdlRoot directories.
     * By default, we scan for *.wsdl (see include/exclude params as well) in the wsdlRoot
     * directories and run the tool on all the wsdls we find. This disables that scan
     * and requires an explicit wsdlOption to be set for each wsdl that needs to be processed.
     */
    @Parameter(property = "cxf.disableDirectoryScan", defaultValue = "false")
    protected boolean disableDirectoryScan;
    /**
     * Allows running the JavaToWs in a separate process. Valid values are "false", "always", and "once" The
     * value of "true" is equal to "once"
     */
    @Parameter(defaultValue = "false")
    protected String fork;
    /**
     * A list of wsdl files to include. Can contain ant-style wildcards and double wildcards. Defaults to
     * *.wsdl
     */
    @Parameter
    protected String[] includes;
    /**
     * Directory in which the "DONE" markers are saved that
     */
    @Parameter(property = "cxf.markerDirectory", defaultValue = "${project.build.directory}/cxf-codegen-plugin-markers")
    protected File markerDirectory;
    /**
     * The plugin dependencies, needed for the fork mode
     */
    @Parameter(required = true, readonly = true, property = "plugin.artifacts")
    protected List<Artifact> pluginArtifacts;
    /** The Maven project this mojo executes in. */
    @Parameter(required = true, property = "project")
    protected MavenProject project;
    /**
     * Use the compile classpath rather than the test classpath for execution useful if the test dependencies
     * clash with those of wsdl2java
     */
    @Parameter(property = "cxf.useCompileClasspath", defaultValue = "false")
    protected boolean useCompileClasspath;
    /**
     * A list of wsdl files to exclude. Can contain ant-style wildcards and double wildcards.
     */
    @Parameter
    protected String[] excludes;
    /** Directory scanned for test wsdls (generated code goes to the test sources). */
    @Parameter(property = "cxf.testWsdlRoot", defaultValue = "${basedir}/src/test/resources/wsdl")
    protected File testWsdlRoot;
    /** Directory scanned for main wsdls. */
    @Parameter(property = "cxf.wsdlRoot", defaultValue = "${basedir}/src/main/resources/wsdl")
    protected File wsdlRoot;
    /** When true, skips the explicit System.gc() call at the end of execute(). */
    @Parameter(property = "cxf.skipGarbageCollection", defaultValue = "false")
    protected boolean skipGarbageCollection;
    /** m2e/incremental-build integration: used to detect deltas and refresh outputs. */
    @Component
    protected BuildContext buildContext;
    /**
     * Sets the JVM arguments (i.e. <code>-Xms128m -Xmx128m</code>) if fork is not set to <code>false</code>.
     */
    @Parameter(property = "cxf.codegen.jvmArgs")
    private String additionalJvmArgs;
    /**
     * Sets the Java executable to use when fork parameter is <code>true</code>.
     */
    @Parameter
    private String javaExecutable;
    /**
     * The toolchain manager.
     */
    @Component
    private ToolchainManager toolchainManager;
    /**
     * The Maven session.
     */
    @Parameter(readonly = true, required = true, property = "session")
    private MavenSession mavenSession;
    /** Resolver used to find wsdl artifacts among the project's dependencies. */
    @Component
    private ProjectDependenciesResolver projectDependencyResolver;
    /** Repository system used to resolve arbitrary (remote) wsdl artifacts. */
    @Component
    private RepositorySystem repositorySystem;

    /** Default constructor; all configuration is injected by Maven. */
    public AbstractCodegenMojo() {
        super();
    }
    /**
     * Mojo entry point: registers the generated-source roots, collects the
     * effective wsdl options, and runs code generation for each wsdl either
     * in-process or in a forked JVM, restoring proxy settings afterwards.
     *
     * @throws MojoExecutionException if generation of any wsdl fails
     */
    public void execute() throws MojoExecutionException {
        // On Java 9+ always fork so the required --add-opens/--add-exports flags
        // can be passed to the generator JVM (see setJvmForkArgs).
        if (JavaUtils.isJava9Compatible()) {
            fork = "true";
        }
        System.setProperty("org.apache.cxf.JDKBugHacks.defaultUsesCaches", "true");
        // add the generated source into compile source
        // do this step first to ensure the source folder will be added to the Eclipse classpath
        if (project != null && getGeneratedSourceRoot() != null) {
            project.addCompileSourceRoot(getGeneratedSourceRoot().getAbsolutePath());
        }
        if (project != null && getGeneratedTestRoot() != null) {
            project.addTestCompileSourceRoot(getGeneratedTestRoot().getAbsolutePath());
        }
        checkResources();
        // if this is an m2e configuration build then return immediately without doing any work
        if (project != null && buildContext.isIncremental() && !buildContext.hasDelta(project.getBasedir())) {
            return;
        }
        File classesDir = new File(classesDirectory);
        /*
         * This shouldn't be needed, but it's harmless.
         */
        classesDir.mkdirs();
        if (includes == null) {
            includes = new String[] {
                "*.wsdl"
            };
        }
        markerDirectory.mkdirs();
        // Snapshot the current proxy-related system properties so they can be
        // restored in the finally block below.
        String originalProxyHost = SystemPropertyAction.getProperty(HTTP_PROXY_HOST);
        String originalProxyPort = SystemPropertyAction.getProperty(HTTP_PROXY_PORT);
        String originalNonProxyHosts = SystemPropertyAction.getProperty(HTTP_NON_PROXY_HOSTS);
        String originalProxyUser = SystemPropertyAction.getProperty(HTTP_PROXY_USER);
        String originalProxyPassword = SystemPropertyAction.getProperty(HTTP_PROXY_PASSWORD);
        Bus bus = null;
        ClassLoaderSwitcher classLoaderSwitcher = null;
        try {
            configureProxyServerSettings();
            List<GenericWsdlOption> effectiveWsdlOptions = createWsdlOptionsFromScansAndExplicitWsdlOptions();
            if (effectiveWsdlOptions.isEmpty()) {
                getLog().info("Nothing to generate");
                return;
            }
            classLoaderSwitcher = new ClassLoaderSwitcher(getLog());
            boolean result = true;
            Set<URI> cp = classLoaderSwitcher.switchClassLoader(project, useCompileClasspath, classesDir);
            // NOTE(review): only "once"/"true" are handled here although the field
            // javadoc also documents "always" — confirm "always" is handled by subclasses.
            if ("once".equals(fork) || "true".equals(fork)) {
                forkOnce(cp, effectiveWsdlOptions);
            } else {
                for (GenericWsdlOption o : effectiveWsdlOptions) {
                    // The bus is reused across wsdls and shut down once at the end.
                    bus = generate(o, bus, cp);
                    File[] dirs = o.getDeleteDirs();
                    if (dirs != null) {
                        for (int idx = 0; idx < dirs.length; ++idx) {
                            result = result && deleteDir(dirs[idx]);
                        }
                    }
                }
            }
        } finally {
            // cleanup as much as we can.
            if (bus != null) {
                bus.shutdown(true);
            }
            if (classLoaderSwitcher != null) {
                classLoaderSwitcher.restoreClassLoader();
            }
            restoreProxySetting(originalProxyHost, originalProxyPort, originalNonProxyHosts,
                                originalProxyUser, originalProxyPassword);
        }
        checkResources();
        // refresh the generated sources
        if (project != null && getGeneratedSourceRoot() != null && getGeneratedSourceRoot().exists()) {
            buildContext.refresh(getGeneratedSourceRoot().getAbsoluteFile());
        }
        if (project != null && getGeneratedTestRoot() != null && getGeneratedTestRoot().exists()) {
            buildContext.refresh(getGeneratedTestRoot().getAbsoluteFile());
        }
        if (!skipGarbageCollection) {
            System.gc();
        }
    }
    /**
     * Ensures the generated source and test roots are registered as project
     * resources, with include patterns covering any non-.java files the
     * generator produced (e.g. wsdl/xsd copies).
     */
    private void checkResources() {
        File root = project.getBasedir();
        Resource sourceRoot = null;
        Resource testRoot = null;
        File genroot = getGeneratedSourceRoot();
        if (genroot != null) {
            // Reuse the existing Resource entry for this directory if one exists.
            List<Resource> resources = project.getBuild().getResources();
            for (Resource r : resources) {
                File d = new File(root, r.getDirectory());
                if (d.equals(genroot)) {
                    sourceRoot = r;
                }
            }
            Resource r2 = scanForResources(genroot, sourceRoot);
            // scanForResources only creates a new Resource when it found files to
            // include, so identity inequality means there is something to register.
            if (r2 != sourceRoot) {
                r2.setDirectory(getGeneratedSourceRoot().getAbsolutePath());
                project.addResource(r2);
            }
        }
        genroot = getGeneratedTestRoot();
        if (genroot != null) {
            List<Resource> resources = project.getBuild().getTestResources();
            for (Resource r : resources) {
                File d = new File(root, r.getDirectory());
                if (d.equals(genroot)) {
                    testRoot = r;
                }
            }
            Resource r2 = scanForResources(genroot, testRoot);
            if (r2 != testRoot) {
                r2.setDirectory(getGeneratedTestRoot().getAbsolutePath());
                project.addTestResource(r2);
            }
        }
    }
private Resource scanForResources(File rootFile, Resource root) {
File[] files = rootFile.listFiles();
if (files == null) {
return root;
}
for (File f : files) {
if (f.isDirectory()) {
root = scanForResources(f, root);
} else if (!f.getName().endsWith(".java")) {
String n = f.getName();
int idx = n.lastIndexOf('.');
if (idx != -1) {
n = "**/*" + n.substring(idx);
}
if (root == null) {
root = new Resource();
}
if (!root.getIncludes().contains(n)) {
root.addInclude(n);
}
}
}
return root;
}
private void restoreProxySetting(String originalProxyHost, String originalProxyPort,
String originalNonProxyHosts,
String originalProxyUser,
String originalProxyPassword) {
if (originalProxyHost != null) {
System.setProperty(HTTP_PROXY_HOST, originalProxyHost);
} else {
System.getProperties().remove(HTTP_PROXY_HOST);
}
if (originalProxyPort != null) {
System.setProperty(HTTP_PROXY_PORT, originalProxyPort);
} else {
System.getProperties().remove(HTTP_PROXY_PORT);
}
if (originalNonProxyHosts != null) {
System.setProperty(HTTP_NON_PROXY_HOSTS, originalNonProxyHosts);
} else {
System.getProperties().remove(HTTP_NON_PROXY_HOSTS);
}
if (originalProxyUser != null) {
System.setProperty(HTTP_PROXY_USER, originalProxyUser);
} else {
System.getProperties().remove(HTTP_PROXY_USER);
}
if (originalProxyPassword != null) {
System.setProperty(HTTP_PROXY_PASSWORD, originalProxyPassword);
} else {
System.getProperties().remove(HTTP_PROXY_PASSWORD);
}
Proxy proxy = mavenSession.getSettings().getActiveProxy();
if (proxy != null && !StringUtils.isEmpty(proxy.getUsername())
&& !StringUtils.isEmpty(proxy.getPassword())) {
Authenticator.setDefault(null);
}
}
    /**
     * Runs the code generator for a single wsdl option in-process.
     *
     * @param o the wsdl option to generate code for
     * @param bus the CXF bus from the previous invocation (null on first call); reused across wsdls
     * @param cp the effective classpath entries
     * @return the bus to pass to the next invocation
     * @throws MojoExecutionException if generation fails
     */
    protected abstract Bus generate(GenericWsdlOption o,
                                    Bus bus, Set<URI> cp) throws MojoExecutionException;
protected void addPluginArtifact(Set<URI> artifactsPath) {
// for Maven 2.x, the actual artifact isn't in the list.... need to try and find it
URL url = getClass().getResource(getClass().getSimpleName() + ".class");
try {
if ("jar".equals(url.getProtocol())) {
String s = url.getPath();
if (s.contains("!")) {
s = s.substring(0, s.indexOf('!'));
url = new URL(s);
}
}
URI uri = new URI(url.getProtocol(), null, url.getPath(), null, null);
if (uri.getSchemeSpecificPart().endsWith(".class")) {
String s = uri.toString();
s = s.substring(0, s.length() - 6 - getClass().getName().length());
uri = new URI(s);
}
File file = new File(uri);
if (file.exists()) {
artifactsPath.add(file.toURI());
}
} catch (Exception ex) {
// ex.printStackTrace();
}
}
protected void configureProxyServerSettings() throws MojoExecutionException {
Proxy proxy = mavenSession.getSettings().getActiveProxy();
if (proxy != null) {
getLog().info("Using proxy server configured in maven.");
if (proxy.getHost() == null) {
throw new MojoExecutionException("Proxy in settings.xml has no host");
}
if (proxy.getHost() != null) {
System.setProperty(HTTP_PROXY_HOST, proxy.getHost());
}
if (String.valueOf(proxy.getPort()) != null) {
System.setProperty(HTTP_PROXY_PORT, String.valueOf(proxy.getPort()));
}
if (proxy.getNonProxyHosts() != null) {
System.setProperty(HTTP_NON_PROXY_HOSTS, proxy.getNonProxyHosts());
}
if (!StringUtils.isEmpty(proxy.getUsername())
&& !StringUtils.isEmpty(proxy.getPassword())) {
final String authUser = proxy.getUsername();
final String authPassword = proxy.getPassword();
Authenticator.setDefault(new Authenticator() {
public PasswordAuthentication getPasswordAuthentication() {
return new PasswordAuthentication(authUser, authPassword.toCharArray());
}
});
System.setProperty(HTTP_PROXY_USER, authUser);
System.setProperty(HTTP_PROXY_PASSWORD, authPassword);
}
}
}
    /**
     * Builds the effective list of wsdl options from explicit configuration,
     * directory scans and dependency scans.
     *
     * @return the wsdl options to process (may be empty, never null — TODO confirm non-null contract)
     * @throws MojoExecutionException if the options cannot be assembled
     */
    protected abstract List<GenericWsdlOption> createWsdlOptionsFromScansAndExplicitWsdlOptions()
        throws MojoExecutionException;
/**
* Recursively delete the given directory
*
* @param f
* @return
*/
protected boolean deleteDir(File f) {
if (f.isDirectory()) {
File[] files = f.listFiles();
if (files != null) {
for (int idx = 0; idx < files.length; ++idx) {
deleteDir(files[idx]);
}
}
}
if (f.exists()) {
return f.delete();
}
buildContext.refresh(f.getParentFile());
return true;
}
protected abstract String getMarkerSuffix();
    /**
     * Builds the tool command line for one wsdl option, creating its output
     * directory as a side effect.
     *
     * @param wsdlOption the option to build arguments for
     * @return the argument list to pass to the generator
     * @throws MojoExecutionException if the wsdl URI cannot be resolved
     */
    protected List<String> generateCommandLine(GenericWsdlOption wsdlOption)
        throws MojoExecutionException {
        File outputDirFile = wsdlOption.getOutputDir();
        outputDirFile.mkdirs();
        URI basedir = project.getBasedir().toURI();
        URI wsdlURI = getWsdlURI(wsdlOption, basedir);
        return wsdlOption.generateCommandLine(outputDirFile, basedir, wsdlURI,
                                              getLog().isDebugEnabled());
    }
    /**
     * Runs all outstanding wsdl options in a single forked JVM: first collects
     * the options whose "DONE" marker is stale, then forks once with all of
     * their command lines, and finally cleans up and re-creates the markers.
     *
     * @param classPath classpath entries for the forked JVM
     * @param effectiveWsdlOptions all configured wsdl options
     * @throws MojoExecutionException if the fork fails or an artifact file is missing
     */
    protected void forkOnce(Set<URI> classPath, List<GenericWsdlOption> effectiveWsdlOptions)
        throws MojoExecutionException {
        List<GenericWsdlOption> toDo = new LinkedList<>();
        List<List<String>> wargs = new LinkedList<>();
        // Phase 1: figure out which wsdls actually need (re)generation.
        for (GenericWsdlOption wsdlOption : effectiveWsdlOptions) {
            File outputDirFile = wsdlOption.getOutputDir();
            outputDirFile.mkdirs();
            URI basedir = project.getBasedir().toURI();
            URI wsdlURI = getWsdlURI(wsdlOption, basedir);
            File doneFile = getDoneFile(basedir, wsdlURI, getMarkerSuffix());
            if (!shouldRun(wsdlOption, doneFile, wsdlURI)) {
                continue;
            }
            doneFile.delete();
            toDo.add(wsdlOption);
            wargs.add(generateCommandLine(wsdlOption));
        }
        if (wargs.isEmpty()) {
            return;
        }
        // Phase 2: assemble the fork classpath from the plugin artifacts plus
        // the caller-supplied classpath, then fork a single JVM for all wsdls.
        Set<URI> artifactsPath = new LinkedHashSet<>();
        for (Artifact a : pluginArtifacts) {
            File file = a.getFile();
            if (file == null) {
                throw new MojoExecutionException("Unable to find file for artifact " + a.getGroupId()
                                                 + ":" + a.getArtifactId() + ":" + a.getVersion());
            }
            artifactsPath.add(file.toURI());
        }
        addPluginArtifact(artifactsPath);
        artifactsPath.addAll(classPath);
        String[] args = createForkOnceArgs(wargs);
        runForked(artifactsPath, getForkClass().getName(), args);
        // Phase 3: delete requested directories and write the "DONE" markers.
        for (GenericWsdlOption wsdlOption : toDo) {
            File[] dirs = wsdlOption.getDeleteDirs();
            if (dirs != null) {
                for (int idx = 0; idx < dirs.length; ++idx) {
                    deleteDir(dirs[idx]);
                }
            }
            URI basedir = project.getBasedir().toURI();
            URI wsdlURI = getWsdlURI(wsdlOption, basedir);
            File doneFile = getDoneFile(basedir, wsdlURI, getMarkerSuffix());
            try {
                createMarkerFile(wsdlOption, doneFile, wsdlURI);
            } catch (Throwable e) {
                // A missing marker only means the wsdl is regenerated next build.
                getLog().warn("Could not create marker file " + doneFile.getAbsolutePath());
                getLog().debug(e);
            }
        }
    }
protected abstract Class<?> getForkClass();
protected File getDoneFile(URI basedir, URI wsdlURI, String mojo) {
String doneFileName = mojo + wsdlURI.toString();
try {
MessageDigest cript = MessageDigest.getInstance("SHA-1");
cript.reset();
cript.update(doneFileName.getBytes("utf8"));
doneFileName = new jakarta.xml.bind.annotation.adapters.HexBinaryAdapter().marshal(cript.digest());
} catch (Exception e) {
//ignore, we'll try and fake it based on the wsdl
// Strip the basedir from the doneFileName
if (doneFileName.startsWith(basedir.toString())) {
doneFileName = doneFileName.substring(basedir.toString().length());
}
// If URL to WSDL, replace ? and & since they're invalid chars for file names
// Not to mention slashes.
doneFileName = doneFileName.replace('?', '_').replace('&', '_').replace('/', '_').replace('\\', '_')
.replace(':', '_');
doneFileName += ".DONE";
}
return new File(markerDirectory, "." + doneFileName);
}
    /** Returns the directory receiving generated main sources, or null if none. */
    protected abstract File getGeneratedSourceRoot();

    /** Returns the directory receiving generated test sources, or null if none. */
    protected abstract File getGeneratedTestRoot();
    /**
     * Forks a JVM running {@code mainClassName} with the given classpath and
     * arguments. The (potentially very long) classpath is passed via the
     * Class-Path attribute of a temporary manifest-only jar to avoid OS
     * command-line length limits.
     *
     * @param classPath classpath entries for the forked JVM
     * @param mainClassName fully-qualified main class to run
     * @param args program arguments
     * @throws MojoExecutionException on non-zero exit, tool errors, or launch failure
     */
    protected void runForked(Set<URI> classPath,
                             String mainClassName,
                             String[] args) throws MojoExecutionException {
        getLog().info("Running code generation in fork mode...");
        getLog().debug("Running code generation in fork mode with args " + Arrays.asList(args));
        Commandline cmd = new Commandline();
        cmd.getShell().setQuotedArgumentsEnabled(true); // for JVM args
        cmd.setWorkingDirectory(project.getBuild().getDirectory());
        String javaPath = getJavaExecutable().getAbsolutePath();
        cmd.setExecutable(javaPath);
        // May prepend --add-opens/--add-exports to additionalJvmArgs on Java 9+.
        setJvmForkArgs(javaPath);
        cmd.createArg().setLine(additionalJvmArgs);
        final File file;
        try {
            // file = new File("/tmp/test.jar");
            file = FileUtils.createTempFile("cxf-codegen", ".jar");
            JarArchiver jar = new JarArchiver();
            jar.setDestFile(file.getAbsoluteFile());
            Manifest manifest = new Manifest();
            Attribute attr = new Attribute();
            attr.setName("Class-Path");
            StringBuilder b = new StringBuilder(8000);
            for (URI cp : classPath) {
                b.append(cp.toURL().toExternalForm()).append(' ');
            }
            attr.setValue(b.toString());
            manifest.getMainSection().addConfiguredAttribute(attr);
            attr = new Attribute();
            attr.setName("Main-Class");
            attr.setValue(mainClassName);
            manifest.getMainSection().addConfiguredAttribute(attr);
            jar.addConfiguredManifest(manifest);
            jar.createArchive();
            cmd.createArg().setValue("-jar");
            String tmpFilePath = file.getAbsolutePath();
            if (tmpFilePath.contains(" ")) {
                //ensure the path is in double quotation marks if the path contain space
                tmpFilePath = '"' + tmpFilePath + '"';
            }
            cmd.createArg().setValue(tmpFilePath);
        } catch (Exception e1) {
            throw new MojoExecutionException("Could not create runtime jar", e1);
        }
        cmd.addArguments(args);
        // stdout goes to the build log at info level.
        StreamConsumer out = new StreamConsumer() {
            public void consumeLine(String line) {
                getLog().info(line);
            }
        };
        // stderr is logged and also captured so tool errors can be detected below.
        final StringBuilder b = new StringBuilder();
        StreamConsumer err = new StreamConsumer() {
            public void consumeLine(String line) {
                b.append(line);
                b.append('\n');
                getLog().warn(line);
            }
        };
        int exitCode;
        try {
            exitCode = CommandLineUtils.executeCommandLine(cmd, out, err);
        } catch (CommandLineException e) {
            getLog().debug(e);
            throw new MojoExecutionException(e.getMessage(), e);
        }
        String cmdLine = CommandLineUtils.toString(cmd.getCommandline());
        if (exitCode != 0) {
            StringBuilder msg = new StringBuilder("\nExit code: ");
            msg.append(exitCode);
            msg.append('\n');
            msg.append("Command line was: ").append(cmdLine).append('\n').append('\n');
            throw new MojoExecutionException(msg.toString());
        }
        file.delete();
        // The tool may exit 0 but still report errors on stderr; treat those as failures.
        if (b.toString().contains("WSDL2Java Error")) {
            StringBuilder msg = new StringBuilder();
            msg.append(b.toString());
            msg.append('\n');
            msg.append("Command line was: ").append(cmdLine).append('\n').append('\n');
            throw new MojoExecutionException(msg.toString());
        }
    }
/**
* Run the JDK version (could be set via the toolchain) and see if we need to configure the JvmArgs
* accordingly. Once we remove JDK8 support we can just add the additional args by default and remove
* this method.
*/
private void setJvmForkArgs(String javaExecutablePath) {
Commandline cmd = new Commandline();
cmd.getShell().setQuotedArgumentsEnabled(true); // for JVM args
cmd.setWorkingDirectory(project.getBuild().getDirectory());
cmd.setExecutable(javaExecutablePath);
Java9StreamConsumer consumer = new Java9StreamConsumer();
try {
cmd.createArg().setValue("-XshowSettings:properties -version");
CommandLineUtils.executeCommandLine(cmd, null, consumer);
} catch (Exception e2) {
e2.printStackTrace();
}
if (additionalJvmArgs == null) {
additionalJvmArgs = "";
}
if (consumer.isJava9Plus()) {
additionalJvmArgs = "--add-exports=jdk.xml.dom/org.w3c.dom.html=ALL-UNNAMED "
+ "--add-exports=java.xml/com.sun.org.apache.xerces.internal.impl.xs=ALL-UNNAMED "
+ "--add-opens java.base/java.security=ALL-UNNAMED "
+ "--add-opens java.base/java.net=ALL-UNNAMED "
+ "--add-opens java.base/java.lang=ALL-UNNAMED "
+ "--add-opens java.base/java.util=ALL-UNNAMED "
+ "--add-opens java.base/java.util.concurrent=ALL-UNNAMED "
+ additionalJvmArgs;
}
}
/**
* Parse each line of the output for "java.version" and see if the version is >= 9
*/
private static final class Java9StreamConsumer implements StreamConsumer {
boolean java9;
public void consumeLine(String line) {
if (!java9 && line.contains("java.version")) {
String version = line.trim().substring("java.version = ".length());
if (version != null) {
if (version.indexOf('.') > 0) {
version = version.substring(0, version.indexOf('.'));
}
if (version.indexOf('-') > 0) {
version = version.substring(0, version.indexOf('-'));
}
try {
if (Integer.valueOf(version) >= 9) {
java9 = true;
}
} catch (NumberFormatException ex) {
// ignore
}
}
}
}
public boolean isJava9Plus() {
return java9;
}
}
    /**
     * Determine if code should be generated from the given wsdl
     *
     * @param wsdlOption the option under consideration
     * @param doneFile the "DONE" marker for this wsdl
     * @param wsdlURI the resolved wsdl URI
     * @return true if the generator should run for this wsdl
     */
    protected abstract boolean shouldRun(GenericWsdlOption wsdlOption, File doneFile, URI wsdlURI);

    /**
     * Writes the "DONE" marker after a successful generation; subclasses may
     * override to record additional state.
     * Note: the boolean result of createNewFile() is deliberately ignored —
     * callers treat marker creation as best-effort.
     */
    protected void createMarkerFile(GenericWsdlOption wsdlOption, File doneFile, URI wsdlURI) throws IOException {
        doneFile.createNewFile();
    }
private String[] createForkOnceArgs(List<List<String>> wargs) throws MojoExecutionException {
try {
File f = FileUtils.createTempFile("cxf-w2j", "args");
PrintWriter fw = new PrintWriter(new FileWriter(f));
for (List<String> args : wargs) {
fw.println(Integer.toString(args.size()));
for (String s : args) {
fw.println(s);
}
}
fw.println("-1");
fw.close();
return new String[] {
f.getAbsolutePath()
};
} catch (IOException ex) {
throw new MojoExecutionException("Could not create argument file", ex);
}
}
/**
* Try to find a file matching the wsdl path (either absolutely, relatively to the current dir or to
* the project base dir)
*
* @return wsdl file
*/
public File getWsdlFile(GenericWsdlOption option, File baseDir) {
if (option.getUri() == null) {
return null;
}
File file = null;
try {
URI uri = new URI(option.getUri());
if (uri.isAbsolute()) {
file = new File(uri);
}
} catch (Exception e) {
// ignore
}
if (file == null || !file.exists()) {
file = new File(option.getUri());
}
if (!file.exists()) {
file = new File(baseDir, option.getUri());
}
return file;
}
public URI getWsdlURI(GenericWsdlOption option, URI baseURI) throws MojoExecutionException {
String wsdlLocation = option.getUri();
if (wsdlLocation == null) {
throw new MojoExecutionException("No wsdl available for base URI " + baseURI);
}
File wsdlFile = new File(wsdlLocation);
return wsdlFile.exists() ? wsdlFile.toURI()
: baseURI.resolve(URIParserUtil.escapeChars(wsdlLocation));
}
    /**
     * For every wsdl option backed by a Maven artifact, resolves that artifact
     * and rewrites the option's URI to point at the downloaded file.
     *
     * @param effectiveWsdlOptions all options; those without an artifact are skipped
     * @throws MojoExecutionException if an artifact cannot be resolved
     */
    protected void downloadRemoteWsdls(List<GenericWsdlOption> effectiveWsdlOptions)
        throws MojoExecutionException {
        for (GenericWsdlOption wsdlOption : effectiveWsdlOptions) {
            WsdlArtifact wsdlA = wsdlOption.getArtifact();
            if (wsdlA == null) {
                continue;
            }
            Artifact wsdlArtifact = repositorySystem.createArtifactWithClassifier(wsdlA.getGroupId(),
                                                                                  wsdlA.getArtifactId(),
                                                                                  wsdlA.getVersion(),
                                                                                  wsdlA.getType(),
                                                                                  wsdlA.getClassifier());
            wsdlArtifact = resolveRemoteWsdlArtifact(wsdlArtifact);
            if (wsdlArtifact != null) {
                File supposedFile = wsdlArtifact.getFile();
                // Guard against resolution "succeeding" without a usable file on disk.
                if (!supposedFile.exists() || !supposedFile.isFile()) {
                    getLog().info("Apparent Maven bug: wsdl artifact 'resolved' to "
                                  + supposedFile.getAbsolutePath() + " for " + wsdlArtifact.toString());
                    continue;
                }
                String path = supposedFile.getAbsolutePath();
                getLog().info("Resolved WSDL artifact to file " + path);
                wsdlOption.setUri(path);
            }
        }
    }
    /**
     * Determines the java binary used for forking, in order of preference:
     * the explicit {@code javaExecutable} parameter, the "jdk" toolchain, then
     * the current JVM's java home.
     *
     * @return the java executable file (verified to exist)
     * @throws MojoExecutionException when the resolved path is not a file
     */
    private File getJavaExecutable() throws MojoExecutionException {
        if (javaExecutable != null) {
            getLog().debug("Plugin configuration set the 'javaExecutable' parameter to " + javaExecutable);
        } else {
            Toolchain tc = toolchainManager.getToolchainFromBuildContext("jdk", mavenSession);
            if (tc != null) {
                getLog().info("Using toolchain " + tc + " to find the java executable");
                javaExecutable = tc.findTool("java");
            } else {
                getLog().debug("The java executable is set to default value");
                javaExecutable = SystemUtils.getJavaHome() + File.separator + "bin" + File.separator + "java";
            }
        }
        // On Windows append ".exe" unless the configured path already carries it.
        String exe = SystemUtils.IS_OS_WINDOWS && !javaExecutable.endsWith(".exe") ? ".exe" : "";
        File javaExe = new File(javaExecutable + exe);
        if (!javaExe.isFile()) {
            throw new MojoExecutionException("The java executable '" + javaExe + "' doesn't exist or is not a file."
                                             + " Verify the <javaExecutable/> parameter or toolchain configuration.");
        }
        getLog().info("The java executable is " + javaExe.getAbsolutePath());
        return javaExe;
    }
private Artifact resolveRemoteWsdlArtifact(Artifact artifact) throws MojoExecutionException {
Artifact remoteWsdl = resolveDependentWsdl(artifact);
if (remoteWsdl == null) {
remoteWsdl = resolveAttachedWsdl(artifact);
}
if (remoteWsdl == null) {
remoteWsdl = resolveArbitraryWsdl(artifact);
}
if (remoteWsdl != null && remoteWsdl.isResolved()) {
return remoteWsdl;
}
throw new MojoExecutionException(String.format("Failed to resolve WSDL artifact %s",
artifact.toString()));
}
    /**
     * Looks for the wsdl artifact among the project's resolved runtime-scope
     * dependencies.
     *
     * @param artifact the artifact to look for
     * @return the matching dependency artifact, or null if not found
     */
    private Artifact resolveDependentWsdl(Artifact artifact) {
        Collection<String> scopes = new ArrayList<>();
        scopes.add(Artifact.SCOPE_RUNTIME);
        Set<Artifact> artifactSet = null;
        try {
            artifactSet = projectDependencyResolver.resolve(project, scopes, mavenSession);
        } catch (AbstractArtifactResolutionException e) {
            // Non-fatal: the caller falls back to other resolution strategies.
            getLog().info("Error resolving dependent wsdl artifact.", e);
        }
        return findWsdlArtifact(artifact, artifactSet);
    }
private Artifact resolveAttachedWsdl(Artifact artifact) {
List<MavenProject> rProjects = mavenSession.getProjects();
List<Artifact> artifactList = new ArrayList<>();
for (MavenProject rProject : rProjects) {
List<Artifact> list = CastUtils.cast(rProject.getAttachedArtifacts());
if (list != null) {
artifactList.addAll(list);
}
}
return findWsdlArtifact(artifact, artifactList);
}
    /**
     * Resolves the wsdl artifact directly against the configured repositories
     * (no transitive resolution).
     *
     * @param artifact the artifact to resolve
     * @return the resolved artifact, or null when resolution produced nothing
     */
    private Artifact resolveArbitraryWsdl(Artifact artifact) {
        ArtifactResolutionRequest request = new ArtifactResolutionRequest();
        request.setArtifact(artifact);
        request.setResolveRoot(true).setResolveTransitively(false);
        request.setServers(mavenSession.getRequest().getServers());
        request.setMirrors(mavenSession.getRequest().getMirrors());
        request.setProxies(mavenSession.getRequest().getProxies());
        request.setLocalRepository(mavenSession.getLocalRepository());
        request.setRemoteRepositories(mavenSession.getRequest().getRemoteRepositories());
        ArtifactResolutionResult result = repositorySystem.resolve(request);
        Artifact resolvedArtifact = result.getOriginatingArtifact();
        // Some resolution paths leave the originating artifact null; fall back to
        // the first artifact of the result set.
        if (resolvedArtifact == null && !CollectionUtils.isEmpty(result.getArtifacts())) {
            resolvedArtifact = result.getArtifacts().iterator().next();
        }
        return resolvedArtifact;
    }
private Artifact findWsdlArtifact(Artifact targetArtifact, Collection<Artifact> artifactSet) {
if (artifactSet != null && !artifactSet.isEmpty()) {
for (Artifact pArtifact : artifactSet) {
boolean artifactMatched = isArtifactMatched(targetArtifact, pArtifact);
if (targetArtifact.getClassifier() != null && pArtifact.getClassifier() != null
&& targetArtifact.getClassifier().equals(pArtifact.getClassifier())
&& artifactMatched) {
//handle multile classifiers
return pArtifact;
} else if (artifactMatched && targetArtifact.getClassifier() == null) {
return pArtifact;
}
}
}
return null;
}
private boolean isArtifactMatched(Artifact targetArtifact, Artifact pArtifact) {
return targetArtifact.getGroupId().equals(pArtifact.getGroupId())
&& targetArtifact.getArtifactId().equals(pArtifact.getArtifactId())
&& targetArtifact.getVersion().equals(pArtifact.getVersion())
&& ("wsdl".equals(pArtifact.getType())
|| (
targetArtifact.getClassifier() != null
&& pArtifact.getType() != null
&& (targetArtifact.getClassifier() + ".wsdl").equals(pArtifact.getType())
));
}
}
|
googleapis/google-cloud-java | 38,071 | java-retail/google-cloud-retail/src/test/java/com/google/cloud/retail/v2alpha/CatalogServiceClientHttpJsonTest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.retail.v2alpha;
import static com.google.cloud.retail.v2alpha.CatalogServiceClient.ListCatalogsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.testing.MockHttpService;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ApiException;
import com.google.api.gax.rpc.ApiExceptionFactory;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.testing.FakeStatusCode;
import com.google.cloud.retail.v2alpha.stub.HttpJsonCatalogServiceStub;
import com.google.common.collect.Lists;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Timestamp;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
// Generated GAPIC unit test for the HTTP/JSON transport of CatalogServiceClient;
// regenerate rather than hand-editing.
@Generated("by gapic-generator-java")
public class CatalogServiceClientHttpJsonTest {
  // Shared across tests: the mock transport and the client wired to it (see startStaticServer).
  private static MockHttpService mockService;
  private static CatalogServiceClient client;
  /** Creates the mock HTTP service once and builds a credential-less client against it. */
  @BeforeClass
  public static void startStaticServer() throws IOException {
    mockService =
        new MockHttpService(
            HttpJsonCatalogServiceStub.getMethodDescriptors(),
            CatalogServiceSettings.getDefaultEndpoint());
    CatalogServiceSettings settings =
        CatalogServiceSettings.newHttpJsonBuilder()
            .setTransportChannelProvider(
                CatalogServiceSettings.defaultHttpJsonTransportProviderBuilder()
                    .setHttpTransport(mockService)
                    .build())
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = CatalogServiceClient.create(settings);
  }

  /** Shuts the shared client down after all tests have run. */
  @AfterClass
  public static void stopServer() {
    client.close();
  }

  @Before
  public void setUp() {}

  /** Clears recorded requests/responses so tests stay independent. */
  @After
  public void tearDown() throws Exception {
    mockService.reset();
  }
  /** listCatalogs(LocationName): verifies paging, response mapping and client headers. */
  @Test
  public void listCatalogsTest() throws Exception {
    Catalog responsesElement = Catalog.newBuilder().build();
    ListCatalogsResponse expectedResponse =
        ListCatalogsResponse.newBuilder()
            .setNextPageToken("")
            .addAllCatalogs(Arrays.asList(responsesElement))
            .build();
    mockService.addResponse(expectedResponse);
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    ListCatalogsPagedResponse pagedListResponse = client.listCatalogs(parent);
    List<Catalog> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getCatalogsList().get(0), resources.get(0));
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }

  /** listCatalogs(LocationName): an INVALID_ARGUMENT from the service must surface as InvalidArgumentException. */
  @Test
  public void listCatalogsExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      client.listCatalogs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
@Test
public void listCatalogsTest2() throws Exception {
Catalog responsesElement = Catalog.newBuilder().build();
ListCatalogsResponse expectedResponse =
ListCatalogsResponse.newBuilder()
.setNextPageToken("")
.addAllCatalogs(Arrays.asList(responsesElement))
.build();
mockService.addResponse(expectedResponse);
String parent = "projects/project-5833/locations/location-5833";
ListCatalogsPagedResponse pagedListResponse = client.listCatalogs(parent);
List<Catalog> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getCatalogsList().get(0), resources.get(0));
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void listCatalogsExceptionTest2() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
String parent = "projects/project-5833/locations/location-5833";
client.listCatalogs(parent);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void updateCatalogTest() throws Exception {
Catalog expectedResponse =
Catalog.newBuilder()
.setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setDisplayName("displayName1714148973")
.setProductLevelConfig(ProductLevelConfig.newBuilder().build())
.setMerchantCenterLinkingConfig(MerchantCenterLinkingConfig.newBuilder().build())
.build();
mockService.addResponse(expectedResponse);
Catalog catalog =
Catalog.newBuilder()
.setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setDisplayName("displayName1714148973")
.setProductLevelConfig(ProductLevelConfig.newBuilder().build())
.setMerchantCenterLinkingConfig(MerchantCenterLinkingConfig.newBuilder().build())
.build();
FieldMask updateMask = FieldMask.newBuilder().build();
Catalog actualResponse = client.updateCatalog(catalog, updateMask);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void updateCatalogExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
Catalog catalog =
Catalog.newBuilder()
.setName(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setDisplayName("displayName1714148973")
.setProductLevelConfig(ProductLevelConfig.newBuilder().build())
.setMerchantCenterLinkingConfig(MerchantCenterLinkingConfig.newBuilder().build())
.build();
FieldMask updateMask = FieldMask.newBuilder().build();
client.updateCatalog(catalog, updateMask);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void setDefaultBranchTest() throws Exception {
Empty expectedResponse = Empty.newBuilder().build();
mockService.addResponse(expectedResponse);
CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
client.setDefaultBranch(catalog);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void setDefaultBranchExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
client.setDefaultBranch(catalog);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void setDefaultBranchTest2() throws Exception {
Empty expectedResponse = Empty.newBuilder().build();
mockService.addResponse(expectedResponse);
String catalog = "projects/project-6372/locations/location-6372/catalogs/catalog-6372";
client.setDefaultBranch(catalog);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void setDefaultBranchExceptionTest2() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
String catalog = "projects/project-6372/locations/location-6372/catalogs/catalog-6372";
client.setDefaultBranch(catalog);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void getDefaultBranchTest() throws Exception {
GetDefaultBranchResponse expectedResponse =
GetDefaultBranchResponse.newBuilder()
.setBranch(BranchName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[BRANCH]").toString())
.setSetTime(Timestamp.newBuilder().build())
.setNote("note3387378")
.build();
mockService.addResponse(expectedResponse);
CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
GetDefaultBranchResponse actualResponse = client.getDefaultBranch(catalog);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void getDefaultBranchExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
CatalogName catalog = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
client.getDefaultBranch(catalog);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void getDefaultBranchTest2() throws Exception {
GetDefaultBranchResponse expectedResponse =
GetDefaultBranchResponse.newBuilder()
.setBranch(BranchName.of("[PROJECT]", "[LOCATION]", "[CATALOG]", "[BRANCH]").toString())
.setSetTime(Timestamp.newBuilder().build())
.setNote("note3387378")
.build();
mockService.addResponse(expectedResponse);
String catalog = "projects/project-6372/locations/location-6372/catalogs/catalog-6372";
GetDefaultBranchResponse actualResponse = client.getDefaultBranch(catalog);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void getDefaultBranchExceptionTest2() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
String catalog = "projects/project-6372/locations/location-6372/catalogs/catalog-6372";
client.getDefaultBranch(catalog);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void getCompletionConfigTest() throws Exception {
CompletionConfig expectedResponse =
CompletionConfig.newBuilder()
.setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setMatchingOrder("matchingOrder-1366761135")
.setMaxSuggestions(618824852)
.setMinPrefixLength(96853510)
.setAutoLearning(true)
.setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751")
.setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastDenylistImportOperation("lastDenylistImportOperation1262341570")
.setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689")
.build();
mockService.addResponse(expectedResponse);
CompletionConfigName name = CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
CompletionConfig actualResponse = client.getCompletionConfig(name);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void getCompletionConfigExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
CompletionConfigName name = CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
client.getCompletionConfig(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void getCompletionConfigTest2() throws Exception {
CompletionConfig expectedResponse =
CompletionConfig.newBuilder()
.setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setMatchingOrder("matchingOrder-1366761135")
.setMaxSuggestions(618824852)
.setMinPrefixLength(96853510)
.setAutoLearning(true)
.setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751")
.setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastDenylistImportOperation("lastDenylistImportOperation1262341570")
.setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689")
.build();
mockService.addResponse(expectedResponse);
String name =
"projects/project-6627/locations/location-6627/catalogs/catalog-6627/completionConfig";
CompletionConfig actualResponse = client.getCompletionConfig(name);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void getCompletionConfigExceptionTest2() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
String name =
"projects/project-6627/locations/location-6627/catalogs/catalog-6627/completionConfig";
client.getCompletionConfig(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void updateCompletionConfigTest() throws Exception {
CompletionConfig expectedResponse =
CompletionConfig.newBuilder()
.setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setMatchingOrder("matchingOrder-1366761135")
.setMaxSuggestions(618824852)
.setMinPrefixLength(96853510)
.setAutoLearning(true)
.setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751")
.setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastDenylistImportOperation("lastDenylistImportOperation1262341570")
.setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689")
.build();
mockService.addResponse(expectedResponse);
CompletionConfig completionConfig =
CompletionConfig.newBuilder()
.setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setMatchingOrder("matchingOrder-1366761135")
.setMaxSuggestions(618824852)
.setMinPrefixLength(96853510)
.setAutoLearning(true)
.setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751")
.setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastDenylistImportOperation("lastDenylistImportOperation1262341570")
.setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689")
.build();
FieldMask updateMask = FieldMask.newBuilder().build();
CompletionConfig actualResponse = client.updateCompletionConfig(completionConfig, updateMask);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void updateCompletionConfigExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
CompletionConfig completionConfig =
CompletionConfig.newBuilder()
.setName(CompletionConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setMatchingOrder("matchingOrder-1366761135")
.setMaxSuggestions(618824852)
.setMinPrefixLength(96853510)
.setAutoLearning(true)
.setSuggestionsInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastSuggestionsImportOperation("lastSuggestionsImportOperation-245829751")
.setDenylistInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastDenylistImportOperation("lastDenylistImportOperation1262341570")
.setAllowlistInputConfig(CompletionDataInputConfig.newBuilder().build())
.setLastAllowlistImportOperation("lastAllowlistImportOperation1624716689")
.build();
FieldMask updateMask = FieldMask.newBuilder().build();
client.updateCompletionConfig(completionConfig, updateMask);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void getAttributesConfigTest() throws Exception {
AttributesConfig expectedResponse =
AttributesConfig.newBuilder()
.setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
.setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
.build();
mockService.addResponse(expectedResponse);
AttributesConfigName name = AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
AttributesConfig actualResponse = client.getAttributesConfig(name);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void getAttributesConfigExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
AttributesConfigName name = AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
client.getAttributesConfig(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void getAttributesConfigTest2() throws Exception {
AttributesConfig expectedResponse =
AttributesConfig.newBuilder()
.setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
.setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
.build();
mockService.addResponse(expectedResponse);
String name =
"projects/project-9790/locations/location-9790/catalogs/catalog-9790/attributesConfig";
AttributesConfig actualResponse = client.getAttributesConfig(name);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void getAttributesConfigExceptionTest2() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
String name =
"projects/project-9790/locations/location-9790/catalogs/catalog-9790/attributesConfig";
client.getAttributesConfig(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void updateAttributesConfigTest() throws Exception {
AttributesConfig expectedResponse =
AttributesConfig.newBuilder()
.setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
.setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
.build();
mockService.addResponse(expectedResponse);
AttributesConfig attributesConfig =
AttributesConfig.newBuilder()
.setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
.setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
.build();
FieldMask updateMask = FieldMask.newBuilder().build();
AttributesConfig actualResponse = client.updateAttributesConfig(attributesConfig, updateMask);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void updateAttributesConfigExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
AttributesConfig attributesConfig =
AttributesConfig.newBuilder()
.setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
.setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
.build();
FieldMask updateMask = FieldMask.newBuilder().build();
client.updateAttributesConfig(attributesConfig, updateMask);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void addCatalogAttributeTest() throws Exception {
AttributesConfig expectedResponse =
AttributesConfig.newBuilder()
.setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
.setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
.build();
mockService.addResponse(expectedResponse);
AddCatalogAttributeRequest request =
AddCatalogAttributeRequest.newBuilder()
.setAttributesConfig(
AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setCatalogAttribute(CatalogAttribute.newBuilder().build())
.build();
AttributesConfig actualResponse = client.addCatalogAttribute(request);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void addCatalogAttributeExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
AddCatalogAttributeRequest request =
AddCatalogAttributeRequest.newBuilder()
.setAttributesConfig(
AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setCatalogAttribute(CatalogAttribute.newBuilder().build())
.build();
client.addCatalogAttribute(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void removeCatalogAttributeTest() throws Exception {
AttributesConfig expectedResponse =
AttributesConfig.newBuilder()
.setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
.setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
.build();
mockService.addResponse(expectedResponse);
RemoveCatalogAttributeRequest request =
RemoveCatalogAttributeRequest.newBuilder()
.setAttributesConfig(
AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setKey("key106079")
.build();
AttributesConfig actualResponse = client.removeCatalogAttribute(request);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void removeCatalogAttributeExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
RemoveCatalogAttributeRequest request =
RemoveCatalogAttributeRequest.newBuilder()
.setAttributesConfig(
AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setKey("key106079")
.build();
client.removeCatalogAttribute(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void batchRemoveCatalogAttributesTest() throws Exception {
BatchRemoveCatalogAttributesResponse expectedResponse =
BatchRemoveCatalogAttributesResponse.newBuilder()
.addAllDeletedCatalogAttributes(new ArrayList<String>())
.addAllResetCatalogAttributes(new ArrayList<String>())
.build();
mockService.addResponse(expectedResponse);
BatchRemoveCatalogAttributesRequest request =
BatchRemoveCatalogAttributesRequest.newBuilder()
.setAttributesConfig(
AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.addAllAttributeKeys(new ArrayList<String>())
.build();
BatchRemoveCatalogAttributesResponse actualResponse =
client.batchRemoveCatalogAttributes(request);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void batchRemoveCatalogAttributesExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
BatchRemoveCatalogAttributesRequest request =
BatchRemoveCatalogAttributesRequest.newBuilder()
.setAttributesConfig(
AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.addAllAttributeKeys(new ArrayList<String>())
.build();
client.batchRemoveCatalogAttributes(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void replaceCatalogAttributeTest() throws Exception {
AttributesConfig expectedResponse =
AttributesConfig.newBuilder()
.setName(AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.putAllCatalogAttributes(new HashMap<String, CatalogAttribute>())
.setAttributeConfigLevel(AttributeConfigLevel.forNumber(0))
.build();
mockService.addResponse(expectedResponse);
ReplaceCatalogAttributeRequest request =
ReplaceCatalogAttributeRequest.newBuilder()
.setAttributesConfig(
AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setCatalogAttribute(CatalogAttribute.newBuilder().build())
.setUpdateMask(FieldMask.newBuilder().build())
.build();
AttributesConfig actualResponse = client.replaceCatalogAttribute(request);
Assert.assertEquals(expectedResponse, actualResponse);
List<String> actualRequests = mockService.getRequestPaths();
Assert.assertEquals(1, actualRequests.size());
String apiClientHeaderKey =
mockService
.getRequestHeaders()
.get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
.iterator()
.next();
Assert.assertTrue(
GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
.matcher(apiClientHeaderKey)
.matches());
}
@Test
public void replaceCatalogAttributeExceptionTest() throws Exception {
ApiException exception =
ApiExceptionFactory.createException(
new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
mockService.addException(exception);
try {
ReplaceCatalogAttributeRequest request =
ReplaceCatalogAttributeRequest.newBuilder()
.setAttributesConfig(
AttributesConfigName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
.setCatalogAttribute(CatalogAttribute.newBuilder().build())
.setUpdateMask(FieldMask.newBuilder().build())
.build();
client.replaceCatalogAttribute(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
}
|
googleapis/google-api-java-client-services | 38,505 | clients/google-api-services-discoveryengine/v1alpha/2.0.0/com/google/api/services/discoveryengine/v1alpha/model/GoogleCloudDiscoveryengineV1alphaUserEvent.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.discoveryengine.v1alpha.model;
/**
* UserEvent captures all metadata information Discovery Engine API needs to know about how end
* users interact with your website.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Discovery Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class GoogleCloudDiscoveryengineV1alphaUserEvent extends com.google.api.client.json.GenericJson {

  /**
   * Extra user event features to include in the recommendation model. Values must not contain data
   * needing further parsing (e.g. JSON). Custom attribute formatting must be consistent between
   * imported events and events provided with prediction requests, otherwise an `INVALID_ARGUMENT`
   * error is returned. Constraints: keys are UTF-8 strings with a 5,000 character limit; text
   * attributes allow at most 400 non-empty values, each a UTF-8 string of at most 256 characters;
   * number attributes allow at most 400 values. Example extra user information for product
   * recommendations: `traffic_channel` (how the user arrived at the site).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, GoogleCloudDiscoveryengineV1alphaCustomAttribute> attributes;

  static {
    // Forces ProGuard to consider GoogleCloudDiscoveryengineV1alphaCustomAttribute used, since
    // otherwise it would be stripped out.
    // See https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(GoogleCloudDiscoveryengineV1alphaCustomAttribute.class);
  }

  /**
   * Token attributing an API response to the user action(s) that triggered the event. Highly
   * recommended for events resulting from RecommendationService.Recommend; enables accurate
   * attribution of recommendation model performance. Must be one of
   * RecommendResponse.attribution_token (for Recommend results) or
   * SearchResponse.attribution_token (for Search results). Pass the token as a URL parameter to a
   * clicked product's page and log it in events recorded on that page.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String attributionToken;

  /**
   * CompletionService.CompleteQuery details related to the event. Set for `search` events when
   * autocomplete is enabled and the user clicks a search suggestion.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1alphaCompletionInfo completionInfo;

  /**
   * Optional. Conversion type; required if UserEvent.event_type is `conversion`. A customer-defined
   * name in lowercase letters or numbers separated by "-", e.g. "watch", "good-visit". Do not set
   * when UserEvent.event_type is not `conversion`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String conversionType;

  /**
   * The DataStore resource full name, of the form `projects/{project}/locations/{location}/collecti
   * ons/{collection_id}/dataStores/{data_store_id}`. Optional; only required when the data store
   * cannot be determined from UserEvent.engine or UserEvent.documents, and may be omitted when set
   * in the parent of write/import/collect user event requests.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String dataStore;

  /**
   * Set to true only when the API request is made directly from the end user (e.g. a mobile app),
   * in which case UserEvent.user_info.user_agent can be populated from the HTTP request. Do not set
   * when a gateway or server pushes the events, nor when using the JavaScript tag in
   * UserEventService.CollectUserEvent.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Boolean directUserRequest;

  /**
   * List of Documents associated with this user event. Optional except for `view-item`,
   * `add-to-cart`, `purchase`, `media-play` and `media-complete` events. In a `search` event this
   * represents the documents returned on the current page; a new `search` event with different
   * UserEvent.documents is desired after pagination/filtering/ordering even for the same query.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<GoogleCloudDiscoveryengineV1alphaDocumentInfo> documents;

  static {
    // Forces ProGuard to consider GoogleCloudDiscoveryengineV1alphaDocumentInfo used, since
    // otherwise it would be stripped out.
    // See https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(GoogleCloudDiscoveryengineV1alphaDocumentInfo.class);
  }

  /**
   * The Engine resource name, in the form of
   * `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
   * Optional; only required for Engine-produced user events, e.g. events from blended search.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String engine;

  /**
   * Timestamp of when the user event happened. Only required for the
   * UserEventService.ImportUserEvents method.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String eventTime;

  /**
   * Required. User event type. Generic values: `search`, `view-item`, `view-item-list`,
   * `view-home-page`, `view-category-page`. Retail-related values: `add-to-cart`, `purchase`.
   * Media-related values: `media-play`, `media-complete`. Custom conversion value: `conversion`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String eventType;

  /**
   * Filter expression over the documents, conforming to https://google.aip.dev/160#filtering. For
   * `search` events this may come from SearchRequest.filter; for `view-item-list` events generated
   * from a RecommendRequest it may come from RecommendRequest.filter. Must be a UTF-8 encoded
   * string with a length limit of 1,000 characters, otherwise an `INVALID_ARGUMENT` error is
   * returned.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String filter;

  /**
   * Media-specific info.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1alphaMediaInfo mediaInfo;

  /**
   * Page metadata such as categories, critical for event types such as `view-category-page`.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1alphaPageInfo pageInfo;

  /**
   * Panel metadata associated with this user event.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1alphaPanelInfo panel;

  /**
   * Optional. List of panels associated with this event, used for page-level impression data.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<GoogleCloudDiscoveryengineV1alphaPanelInfo> panels;

  static {
    // Forces ProGuard to consider GoogleCloudDiscoveryengineV1alphaPanelInfo used, since
    // otherwise it would be stripped out.
    // See https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(GoogleCloudDiscoveryengineV1alphaPanelInfo.class);
  }

  /**
   * The promotion IDs if this event is associated with promotions. Currently restricted to at most
   * one ID.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<String> promotionIds;

  /**
   * SearchService.Search details related to the event; should be set for `search` events.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1alphaSearchInfo searchInfo;

  /**
   * A unique identifier for tracking a visitor session, with a length limit of 128 bytes. A
   * session aggregates an end user's behavior in a time span. Guidelines: assign a new session_id
   * after 30 minutes of inactivity, and keep session_id unique across users (e.g. use a UUID or
   * prefix with UserEvent.user_pseudo_id).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String sessionId;

  /**
   * Identifiers of the independent experiment groups this user event belongs to; used to
   * distinguish events associated with different experiment setups.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<String> tagIds;

  /**
   * The transaction metadata (if any) associated with this user event.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1alphaTransactionInfo transactionInfo;

  /**
   * Information about the end user.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleCloudDiscoveryengineV1alphaUserInfo userInfo;

  /**
   * Required. A unique identifier for tracking visitors (e.g. an HTTP cookie uniquely identifying
   * a visitor on a single device). Should not change when the visitor logs in or out. Do not reuse
   * the same fixed ID across different users — that mixes event histories and degrades model
   * quality. Must be a UTF-8 encoded string with a length limit of 128 characters, otherwise an
   * `INVALID_ARGUMENT` error is returned, and must not contain PII or user data. Google Analytics
   * [Client ID](https://developers.google.com/analytics/devguides/collection/analyticsjs/field-
   * reference#clientId) is recommended.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String userPseudoId;

  /**
   * Returns the extra user event features for the recommendation model, or {@code null} for none.
   * See the {@code attributes} field for the formatting constraints.
   *
   * @return value or {@code null} for none
   */
  public java.util.Map<String, GoogleCloudDiscoveryengineV1alphaCustomAttribute> getAttributes() {
    return attributes;
  }

  /**
   * Sets the extra user event features for the recommendation model. See the {@code attributes}
   * field for the formatting constraints.
   *
   * @param attributes attributes or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setAttributes(java.util.Map<String, GoogleCloudDiscoveryengineV1alphaCustomAttribute> attributes) {
    this.attributes = attributes;
    return this;
  }

  /**
   * Returns the token attributing an API response to the user action(s) that triggered the event.
   *
   * @return value or {@code null} for none
   */
  public String getAttributionToken() {
    return attributionToken;
  }

  /**
   * Sets the token attributing an API response to the user action(s) that triggered the event.
   * Must be one of RecommendResponse.attribution_token or SearchResponse.attribution_token.
   *
   * @param attributionToken attributionToken or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setAttributionToken(String attributionToken) {
    this.attributionToken = attributionToken;
    return this;
  }

  /**
   * Returns the CompletionService.CompleteQuery details related to the event.
   *
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaCompletionInfo getCompletionInfo() {
    return completionInfo;
  }

  /**
   * Sets the CompletionService.CompleteQuery details related to the event. Should be set for
   * `search` events when autocomplete is enabled and the user clicks a suggestion.
   *
   * @param completionInfo completionInfo or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setCompletionInfo(GoogleCloudDiscoveryengineV1alphaCompletionInfo completionInfo) {
    this.completionInfo = completionInfo;
    return this;
  }

  /**
   * Returns the customer-defined conversion type.
   *
   * @return value or {@code null} for none
   */
  public String getConversionType() {
    return conversionType;
  }

  /**
   * Sets the customer-defined conversion type. Required if UserEvent.event_type is `conversion`;
   * do not set otherwise.
   *
   * @param conversionType conversionType or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setConversionType(String conversionType) {
    this.conversionType = conversionType;
    return this;
  }

  /**
   * Returns the DataStore resource full name.
   *
   * @return value or {@code null} for none
   */
  public String getDataStore() {
    return dataStore;
  }

  /**
   * Sets the DataStore resource full name, of the form `projects/{project}/locations/{location}/co
   * llections/{collection_id}/dataStores/{data_store_id}`. Only required when the data store
   * cannot be determined from UserEvent.engine or UserEvent.documents.
   *
   * @param dataStore dataStore or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setDataStore(String dataStore) {
    this.dataStore = dataStore;
    return this;
  }

  /**
   * Returns whether the request was made directly from the end user.
   *
   * @return value or {@code null} for none
   */
  public Boolean getDirectUserRequest() {
    return directUserRequest;
  }

  /**
   * Sets whether the request was made directly from the end user (e.g. a mobile app). Do not set
   * when a gateway or server pushes the events, nor when using the JavaScript tag in
   * UserEventService.CollectUserEvent.
   *
   * @param directUserRequest directUserRequest or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setDirectUserRequest(Boolean directUserRequest) {
    this.directUserRequest = directUserRequest;
    return this;
  }

  /**
   * Returns the list of Documents associated with this user event.
   *
   * @return value or {@code null} for none
   */
  public java.util.List<GoogleCloudDiscoveryengineV1alphaDocumentInfo> getDocuments() {
    return documents;
  }

  /**
   * Sets the list of Documents associated with this user event. Optional except for `view-item`,
   * `add-to-cart`, `purchase`, `media-play` and `media-complete` events.
   *
   * @param documents documents or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setDocuments(java.util.List<GoogleCloudDiscoveryengineV1alphaDocumentInfo> documents) {
    this.documents = documents;
    return this;
  }

  /**
   * Returns the Engine resource name.
   *
   * @return value or {@code null} for none
   */
  public String getEngine() {
    return engine;
  }

  /**
   * Sets the Engine resource name, in the form of
   * `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
   * Only required for Engine-produced user events.
   *
   * @param engine engine or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setEngine(String engine) {
    this.engine = engine;
    return this;
  }

  /**
   * Returns the timestamp of when the user event happened.
   *
   * @return value or {@code null} for none
   */
  public String getEventTime() {
    return eventTime;
  }

  /**
   * Sets the timestamp of when the user event happened. Only required for the
   * UserEventService.ImportUserEvents method.
   *
   * @param eventTime eventTime or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setEventTime(String eventTime) {
    this.eventTime = eventTime;
    return this;
  }

  /**
   * Returns the user event type.
   *
   * @return value or {@code null} for none
   */
  public String getEventType() {
    return eventType;
  }

  /**
   * Sets the required user event type. See the {@code eventType} field for the allowed values.
   *
   * @param eventType eventType or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setEventType(String eventType) {
    this.eventType = eventType;
    return this;
  }

  /**
   * Returns the filter expression over the documents.
   *
   * @return value or {@code null} for none
   */
  public String getFilter() {
    return filter;
  }

  /**
   * Sets the filter expression over the documents, conforming to
   * https://google.aip.dev/160#filtering. Must be a UTF-8 encoded string with a length limit of
   * 1,000 characters.
   *
   * @param filter filter or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setFilter(String filter) {
    this.filter = filter;
    return this;
  }

  /**
   * Returns the media-specific info.
   *
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaMediaInfo getMediaInfo() {
    return mediaInfo;
  }

  /**
   * Sets the media-specific info.
   *
   * @param mediaInfo mediaInfo or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setMediaInfo(GoogleCloudDiscoveryengineV1alphaMediaInfo mediaInfo) {
    this.mediaInfo = mediaInfo;
    return this;
  }

  /**
   * Returns the page metadata (categories etc.) for event types such as `view-category-page`.
   *
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaPageInfo getPageInfo() {
    return pageInfo;
  }

  /**
   * Sets the page metadata (categories etc.) for event types such as `view-category-page`.
   *
   * @param pageInfo pageInfo or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setPageInfo(GoogleCloudDiscoveryengineV1alphaPageInfo pageInfo) {
    this.pageInfo = pageInfo;
    return this;
  }

  /**
   * Returns the panel metadata associated with this user event.
   *
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaPanelInfo getPanel() {
    return panel;
  }

  /**
   * Sets the panel metadata associated with this user event.
   *
   * @param panel panel or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setPanel(GoogleCloudDiscoveryengineV1alphaPanelInfo panel) {
    this.panel = panel;
    return this;
  }

  /**
   * Returns the list of panels associated with this event, used for page-level impression data.
   *
   * @return value or {@code null} for none
   */
  public java.util.List<GoogleCloudDiscoveryengineV1alphaPanelInfo> getPanels() {
    return panels;
  }

  /**
   * Sets the list of panels associated with this event, used for page-level impression data.
   *
   * @param panels panels or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setPanels(java.util.List<GoogleCloudDiscoveryengineV1alphaPanelInfo> panels) {
    this.panels = panels;
    return this;
  }

  /**
   * Returns the promotion IDs associated with this event.
   *
   * @return value or {@code null} for none
   */
  public java.util.List<String> getPromotionIds() {
    return promotionIds;
  }

  /**
   * Sets the promotion IDs associated with this event. Currently restricted to at most one ID.
   *
   * @param promotionIds promotionIds or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setPromotionIds(java.util.List<String> promotionIds) {
    this.promotionIds = promotionIds;
    return this;
  }

  /**
   * Returns the SearchService.Search details related to the event.
   *
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaSearchInfo getSearchInfo() {
    return searchInfo;
  }

  /**
   * Sets the SearchService.Search details related to the event; should be set for `search` events.
   *
   * @param searchInfo searchInfo or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setSearchInfo(GoogleCloudDiscoveryengineV1alphaSearchInfo searchInfo) {
    this.searchInfo = searchInfo;
    return this;
  }

  /**
   * Returns the visitor session identifier.
   *
   * @return value or {@code null} for none
   */
  public String getSessionId() {
    return sessionId;
  }

  /**
   * Sets the visitor session identifier (length limit 128 bytes). See the {@code sessionId} field
   * for population guidelines.
   *
   * @param sessionId sessionId or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setSessionId(String sessionId) {
    this.sessionId = sessionId;
    return this;
  }

  /**
   * Returns the identifiers of the experiment groups this user event belongs to.
   *
   * @return value or {@code null} for none
   */
  public java.util.List<String> getTagIds() {
    return tagIds;
  }

  /**
   * Sets the identifiers of the independent experiment groups this user event belongs to.
   *
   * @param tagIds tagIds or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setTagIds(java.util.List<String> tagIds) {
    this.tagIds = tagIds;
    return this;
  }

  /**
   * Returns the transaction metadata associated with this user event.
   *
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaTransactionInfo getTransactionInfo() {
    return transactionInfo;
  }

  /**
   * Sets the transaction metadata associated with this user event.
   *
   * @param transactionInfo transactionInfo or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setTransactionInfo(GoogleCloudDiscoveryengineV1alphaTransactionInfo transactionInfo) {
    this.transactionInfo = transactionInfo;
    return this;
  }

  /**
   * Returns the information about the end user.
   *
   * @return value or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserInfo getUserInfo() {
    return userInfo;
  }

  /**
   * Sets the information about the end user.
   *
   * @param userInfo userInfo or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setUserInfo(GoogleCloudDiscoveryengineV1alphaUserInfo userInfo) {
    this.userInfo = userInfo;
    return this;
  }

  /**
   * Returns the unique visitor tracking identifier.
   *
   * @return value or {@code null} for none
   */
  public String getUserPseudoId() {
    return userPseudoId;
  }

  /**
   * Sets the required unique visitor tracking identifier. See the {@code userPseudoId} field for
   * the constraints (UTF-8, max 128 characters, stable across login/logout, no PII).
   *
   * @param userPseudoId userPseudoId or {@code null} for none
   */
  public GoogleCloudDiscoveryengineV1alphaUserEvent setUserPseudoId(String userPseudoId) {
    this.userPseudoId = userPseudoId;
    return this;
  }

  @Override
  public GoogleCloudDiscoveryengineV1alphaUserEvent set(String fieldName, Object value) {
    return (GoogleCloudDiscoveryengineV1alphaUserEvent) super.set(fieldName, value);
  }

  @Override
  public GoogleCloudDiscoveryengineV1alphaUserEvent clone() {
    return (GoogleCloudDiscoveryengineV1alphaUserEvent) super.clone();
  }
}
|
googleapis/google-cloud-java | 38,176 | java-accessapproval/proto-google-cloud-accessapproval-v1/src/main/java/com/google/cloud/accessapproval/v1/ListApprovalRequestsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/accessapproval/v1/accessapproval.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.accessapproval.v1;
/**
*
*
* <pre>
* Response to listing of ApprovalRequest objects.
* </pre>
*
* Protobuf type {@code google.cloud.accessapproval.v1.ListApprovalRequestsResponse}
*/
public final class ListApprovalRequestsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.accessapproval.v1.ListApprovalRequestsResponse)
ListApprovalRequestsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListApprovalRequestsResponse.newBuilder() to construct.
private ListApprovalRequestsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListApprovalRequestsResponse() {
approvalRequests_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListApprovalRequestsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.accessapproval.v1.AccessApprovalProto
.internal_static_google_cloud_accessapproval_v1_ListApprovalRequestsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.accessapproval.v1.AccessApprovalProto
.internal_static_google_cloud_accessapproval_v1_ListApprovalRequestsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse.class,
com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse.Builder.class);
}
public static final int APPROVAL_REQUESTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.accessapproval.v1.ApprovalRequest> approvalRequests_;
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.accessapproval.v1.ApprovalRequest>
getApprovalRequestsList() {
return approvalRequests_;
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.accessapproval.v1.ApprovalRequestOrBuilder>
getApprovalRequestsOrBuilderList() {
return approvalRequests_;
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
@java.lang.Override
public int getApprovalRequestsCount() {
return approvalRequests_.size();
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
@java.lang.Override
public com.google.cloud.accessapproval.v1.ApprovalRequest getApprovalRequests(int index) {
return approvalRequests_.get(index);
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
@java.lang.Override
public com.google.cloud.accessapproval.v1.ApprovalRequestOrBuilder getApprovalRequestsOrBuilder(
int index) {
return approvalRequests_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < approvalRequests_.size(); i++) {
output.writeMessage(1, approvalRequests_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < approvalRequests_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, approvalRequests_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse)) {
return super.equals(obj);
}
com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse other =
(com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse) obj;
if (!getApprovalRequestsList().equals(other.getApprovalRequestsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getApprovalRequestsCount() > 0) {
hash = (37 * hash) + APPROVAL_REQUESTS_FIELD_NUMBER;
hash = (53 * hash) + getApprovalRequestsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response to listing of ApprovalRequest objects.
* </pre>
*
* Protobuf type {@code google.cloud.accessapproval.v1.ListApprovalRequestsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.accessapproval.v1.ListApprovalRequestsResponse)
com.google.cloud.accessapproval.v1.ListApprovalRequestsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.accessapproval.v1.AccessApprovalProto
.internal_static_google_cloud_accessapproval_v1_ListApprovalRequestsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.accessapproval.v1.AccessApprovalProto
.internal_static_google_cloud_accessapproval_v1_ListApprovalRequestsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse.class,
com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse.Builder.class);
}
// Construct using com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (approvalRequestsBuilder_ == null) {
approvalRequests_ = java.util.Collections.emptyList();
} else {
approvalRequests_ = null;
approvalRequestsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.accessapproval.v1.AccessApprovalProto
.internal_static_google_cloud_accessapproval_v1_ListApprovalRequestsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse
getDefaultInstanceForType() {
return com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse build() {
com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse buildPartial() {
com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse result =
new com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse result) {
if (approvalRequestsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
approvalRequests_ = java.util.Collections.unmodifiableList(approvalRequests_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.approvalRequests_ = approvalRequests_;
} else {
result.approvalRequests_ = approvalRequestsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse) {
return mergeFrom((com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse other) {
if (other
== com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse.getDefaultInstance())
return this;
if (approvalRequestsBuilder_ == null) {
if (!other.approvalRequests_.isEmpty()) {
if (approvalRequests_.isEmpty()) {
approvalRequests_ = other.approvalRequests_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureApprovalRequestsIsMutable();
approvalRequests_.addAll(other.approvalRequests_);
}
onChanged();
}
} else {
if (!other.approvalRequests_.isEmpty()) {
if (approvalRequestsBuilder_.isEmpty()) {
approvalRequestsBuilder_.dispose();
approvalRequestsBuilder_ = null;
approvalRequests_ = other.approvalRequests_;
bitField0_ = (bitField0_ & ~0x00000001);
approvalRequestsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getApprovalRequestsFieldBuilder()
: null;
} else {
approvalRequestsBuilder_.addAllMessages(other.approvalRequests_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.accessapproval.v1.ApprovalRequest m =
input.readMessage(
com.google.cloud.accessapproval.v1.ApprovalRequest.parser(),
extensionRegistry);
if (approvalRequestsBuilder_ == null) {
ensureApprovalRequestsIsMutable();
approvalRequests_.add(m);
} else {
approvalRequestsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.accessapproval.v1.ApprovalRequest> approvalRequests_ =
java.util.Collections.emptyList();
private void ensureApprovalRequestsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
approvalRequests_ =
new java.util.ArrayList<com.google.cloud.accessapproval.v1.ApprovalRequest>(
approvalRequests_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.accessapproval.v1.ApprovalRequest,
com.google.cloud.accessapproval.v1.ApprovalRequest.Builder,
com.google.cloud.accessapproval.v1.ApprovalRequestOrBuilder>
approvalRequestsBuilder_;
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public java.util.List<com.google.cloud.accessapproval.v1.ApprovalRequest>
getApprovalRequestsList() {
if (approvalRequestsBuilder_ == null) {
return java.util.Collections.unmodifiableList(approvalRequests_);
} else {
return approvalRequestsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public int getApprovalRequestsCount() {
if (approvalRequestsBuilder_ == null) {
return approvalRequests_.size();
} else {
return approvalRequestsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public com.google.cloud.accessapproval.v1.ApprovalRequest getApprovalRequests(int index) {
if (approvalRequestsBuilder_ == null) {
return approvalRequests_.get(index);
} else {
return approvalRequestsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public Builder setApprovalRequests(
int index, com.google.cloud.accessapproval.v1.ApprovalRequest value) {
if (approvalRequestsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureApprovalRequestsIsMutable();
approvalRequests_.set(index, value);
onChanged();
} else {
approvalRequestsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public Builder setApprovalRequests(
int index, com.google.cloud.accessapproval.v1.ApprovalRequest.Builder builderForValue) {
if (approvalRequestsBuilder_ == null) {
ensureApprovalRequestsIsMutable();
approvalRequests_.set(index, builderForValue.build());
onChanged();
} else {
approvalRequestsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public Builder addApprovalRequests(com.google.cloud.accessapproval.v1.ApprovalRequest value) {
if (approvalRequestsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureApprovalRequestsIsMutable();
approvalRequests_.add(value);
onChanged();
} else {
approvalRequestsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public Builder addApprovalRequests(
int index, com.google.cloud.accessapproval.v1.ApprovalRequest value) {
if (approvalRequestsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureApprovalRequestsIsMutable();
approvalRequests_.add(index, value);
onChanged();
} else {
approvalRequestsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public Builder addApprovalRequests(
com.google.cloud.accessapproval.v1.ApprovalRequest.Builder builderForValue) {
if (approvalRequestsBuilder_ == null) {
ensureApprovalRequestsIsMutable();
approvalRequests_.add(builderForValue.build());
onChanged();
} else {
approvalRequestsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public Builder addApprovalRequests(
int index, com.google.cloud.accessapproval.v1.ApprovalRequest.Builder builderForValue) {
if (approvalRequestsBuilder_ == null) {
ensureApprovalRequestsIsMutable();
approvalRequests_.add(index, builderForValue.build());
onChanged();
} else {
approvalRequestsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public Builder addAllApprovalRequests(
java.lang.Iterable<? extends com.google.cloud.accessapproval.v1.ApprovalRequest> values) {
if (approvalRequestsBuilder_ == null) {
ensureApprovalRequestsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, approvalRequests_);
onChanged();
} else {
approvalRequestsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public Builder clearApprovalRequests() {
if (approvalRequestsBuilder_ == null) {
approvalRequests_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
approvalRequestsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public Builder removeApprovalRequests(int index) {
if (approvalRequestsBuilder_ == null) {
ensureApprovalRequestsIsMutable();
approvalRequests_.remove(index);
onChanged();
} else {
approvalRequestsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public com.google.cloud.accessapproval.v1.ApprovalRequest.Builder getApprovalRequestsBuilder(
int index) {
return getApprovalRequestsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public com.google.cloud.accessapproval.v1.ApprovalRequestOrBuilder getApprovalRequestsOrBuilder(
int index) {
if (approvalRequestsBuilder_ == null) {
return approvalRequests_.get(index);
} else {
return approvalRequestsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public java.util.List<? extends com.google.cloud.accessapproval.v1.ApprovalRequestOrBuilder>
getApprovalRequestsOrBuilderList() {
if (approvalRequestsBuilder_ != null) {
return approvalRequestsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(approvalRequests_);
}
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public com.google.cloud.accessapproval.v1.ApprovalRequest.Builder addApprovalRequestsBuilder() {
return getApprovalRequestsFieldBuilder()
.addBuilder(com.google.cloud.accessapproval.v1.ApprovalRequest.getDefaultInstance());
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public com.google.cloud.accessapproval.v1.ApprovalRequest.Builder addApprovalRequestsBuilder(
int index) {
return getApprovalRequestsFieldBuilder()
.addBuilder(
index, com.google.cloud.accessapproval.v1.ApprovalRequest.getDefaultInstance());
}
/**
*
*
* <pre>
* Approval request details.
* </pre>
*
* <code>repeated .google.cloud.accessapproval.v1.ApprovalRequest approval_requests = 1;</code>
*/
public java.util.List<com.google.cloud.accessapproval.v1.ApprovalRequest.Builder>
getApprovalRequestsBuilderList() {
return getApprovalRequestsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.accessapproval.v1.ApprovalRequest,
com.google.cloud.accessapproval.v1.ApprovalRequest.Builder,
com.google.cloud.accessapproval.v1.ApprovalRequestOrBuilder>
getApprovalRequestsFieldBuilder() {
if (approvalRequestsBuilder_ == null) {
approvalRequestsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.accessapproval.v1.ApprovalRequest,
com.google.cloud.accessapproval.v1.ApprovalRequest.Builder,
com.google.cloud.accessapproval.v1.ApprovalRequestOrBuilder>(
approvalRequests_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
approvalRequests_ = null;
}
return approvalRequestsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.accessapproval.v1.ListApprovalRequestsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.accessapproval.v1.ListApprovalRequestsResponse)
  // Shared immutable default instance; protobuf messages are immutable, so a single
  // instance serves as the default for every caller.
  private static final com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse();
  }

  /** Returns the singleton default (all-fields-unset) instance of this message. */
  public static com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless shared parser for this message type; safe to reuse across threads.
  private static final com.google.protobuf.Parser<ListApprovalRequestsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListApprovalRequestsResponse>() {
        @java.lang.Override
        public ListApprovalRequestsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially-parsed message so callers can inspect what was read.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns the shared parser for {@code ListApprovalRequestsResponse}. */
  public static com.google.protobuf.Parser<ListApprovalRequestsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListApprovalRequestsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.accessapproval.v1.ListApprovalRequestsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/gravitino | 38,400 | core/src/main/java/org/apache/gravitino/job/JobManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.gravitino.job;
import static org.apache.gravitino.Metalake.PROPERTY_IN_USE;
import static org.apache.gravitino.metalake.MetalakeManager.checkMetalake;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.time.Instant;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.gravitino.Config;
import org.apache.gravitino.Configs;
import org.apache.gravitino.Entity;
import org.apache.gravitino.EntityAlreadyExistsException;
import org.apache.gravitino.EntityStore;
import org.apache.gravitino.NameIdentifier;
import org.apache.gravitino.Namespace;
import org.apache.gravitino.connector.job.JobExecutor;
import org.apache.gravitino.exceptions.InUseException;
import org.apache.gravitino.exceptions.JobTemplateAlreadyExistsException;
import org.apache.gravitino.exceptions.NoSuchEntityException;
import org.apache.gravitino.exceptions.NoSuchJobException;
import org.apache.gravitino.exceptions.NoSuchJobTemplateException;
import org.apache.gravitino.lock.LockType;
import org.apache.gravitino.lock.TreeLockUtils;
import org.apache.gravitino.meta.AuditInfo;
import org.apache.gravitino.meta.BaseMetalake;
import org.apache.gravitino.meta.JobEntity;
import org.apache.gravitino.meta.JobTemplateEntity;
import org.apache.gravitino.storage.IdGenerator;
import org.apache.gravitino.utils.NameIdentifierUtil;
import org.apache.gravitino.utils.NamespaceUtil;
import org.apache.gravitino.utils.PrincipalUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class JobManager implements JobOperationDispatcher {
  private static final Logger LOG = LoggerFactory.getLogger(JobManager.class);

  // Matches "{{key}}" placeholders; keys may contain word characters, dots and dashes.
  private static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("\\{\\{([\\w.-]+)\\}\\}");

  // Relative staging path template: /<metalake>/<jobTemplateName>/<JOB_ID_PREFIX><jobId>,
  // appended to the configured staging root.
  private static final String JOB_STAGING_DIR =
      File.separator
          + "%s"
          + File.separator
          + "%s"
          + File.separator
          + JobHandle.JOB_ID_PREFIX
          + "%s";

  private static final long JOB_STAGING_DIR_CLEANUP_MIN_TIME_IN_MS = 600 * 1000L; // 10 minutes

  private static final long JOB_STATUS_PULL_MIN_INTERVAL_IN_MS = 60 * 1000L; // 1 minute

  // Connect/read timeout used when downloading files referenced by job templates.
  private static final int TIMEOUT_IN_MS = 30 * 1000; // 30 seconds

  private final EntityStore entityStore;

  // Root directory under which per-job staging directories are created.
  private final File stagingDir;

  private final JobExecutor jobExecutor;

  private final IdGenerator idGenerator;

  // How long a finished job's staging directory is kept before cleanup.
  private final long jobStagingDirKeepTimeInMs;

  // Single-threaded daemon schedulers for staging-dir cleanup and job-status polling.
  private final ScheduledExecutorService cleanUpExecutor;

  private final ScheduledExecutorService statusPullExecutor;
  /**
   * Creates a JobManager with the job executor selected by the configuration via
   * {@link JobExecutorFactory#create(Config)}.
   *
   * @param config the Gravitino server configuration
   * @param entityStore the entity store used to persist job templates and jobs
   * @param idGenerator generator for new job ids
   */
  public JobManager(Config config, EntityStore entityStore, IdGenerator idGenerator) {
    this(config, entityStore, idGenerator, JobExecutorFactory.create(config));
  }
  /**
   * Creates a JobManager with an explicit job executor (used by tests).
   *
   * <p>Validates/creates the staging root directory, then starts two single-threaded daemon
   * schedulers: one that periodically cleans up staging directories of finished jobs, and one
   * that polls the external job executor to refresh job statuses.
   *
   * @throws IllegalArgumentException if the staging directory is unusable or the cleanup
   *     interval would be zero
   */
  @VisibleForTesting
  JobManager(
      Config config, EntityStore entityStore, IdGenerator idGenerator, JobExecutor jobExecutor) {
    this.entityStore = entityStore;
    this.jobExecutor = jobExecutor;
    this.idGenerator = idGenerator;

    // The staging root must be a readable/writable/executable directory; create it if absent.
    String stagingDirPath = config.get(Configs.JOB_STAGING_DIR);
    this.stagingDir = new File(stagingDirPath);
    if (stagingDir.exists()) {
      if (!stagingDir.isDirectory()) {
        throw new IllegalArgumentException(
            String.format("Staging directory %s exists but is not a directory", stagingDirPath));
      }

      if (!(stagingDir.canExecute() && stagingDir.canRead() && stagingDir.canWrite())) {
        throw new IllegalArgumentException(
            String.format("Staging directory %s is not accessible", stagingDirPath));
      }
    } else {
      if (!stagingDir.mkdirs()) {
        throw new IllegalArgumentException(
            String.format("Failed to create staging directory %s", stagingDirPath));
      }
    }

    this.jobStagingDirKeepTimeInMs = config.get(Configs.JOB_STAGING_DIR_KEEP_TIME_IN_MS);
    if (jobStagingDirKeepTimeInMs < JOB_STAGING_DIR_CLEANUP_MIN_TIME_IN_MS) {
      LOG.warn(
          "The job staging directory keep time is set to {} ms, the number is too small, "
              + "which will cause frequent cleanup, please set it to a value larger than {} if "
              + "you're not using it to do the test.",
          jobStagingDirKeepTimeInMs,
          JOB_STAGING_DIR_CLEANUP_MIN_TIME_IN_MS);
    }

    this.cleanUpExecutor =
        Executors.newSingleThreadScheduledExecutor(
            runnable -> {
              Thread thread = new Thread(runnable, "job-staging-dir-cleanup");
              thread.setDaemon(true);
              return thread;
            });

    // Run the cleanup ten times per keep-time window; integer division can hit zero for
    // very small keep times, which ScheduledExecutorService would reject.
    long scheduleInterval = jobStagingDirKeepTimeInMs / 10;
    Preconditions.checkArgument(
        scheduleInterval != 0,
        "The schedule interval for "
            + "job staging directory cleanup cannot be zero, please set the job staging directory "
            + "keep time to a value larger than %s ms",
        JOB_STAGING_DIR_CLEANUP_MIN_TIME_IN_MS);
    cleanUpExecutor.scheduleAtFixedRate(
        this::cleanUpStagingDirs, scheduleInterval, scheduleInterval, TimeUnit.MILLISECONDS);

    long jobStatusPullIntervalInMs = config.get(Configs.JOB_STATUS_PULL_INTERVAL_IN_MS);
    if (jobStatusPullIntervalInMs < JOB_STATUS_PULL_MIN_INTERVAL_IN_MS) {
      LOG.warn(
          "The job status pull interval is set to {} ms, the number is too small, "
              + "which will cause frequent job status pull from external job executor, please set "
              + "it to a value larger than {} if you're not using it to do the test.",
          jobStatusPullIntervalInMs,
          JOB_STATUS_PULL_MIN_INTERVAL_IN_MS);
    }

    this.statusPullExecutor =
        Executors.newSingleThreadScheduledExecutor(
            runnable -> {
              Thread thread = new Thread(runnable, "job-status-pull");
              thread.setDaemon(true);
              return thread;
            });
    statusPullExecutor.scheduleAtFixedRate(
        this::pullAndUpdateJobStatus,
        jobStatusPullIntervalInMs,
        jobStatusPullIntervalInMs,
        TimeUnit.MILLISECONDS);
  }
@Override
public List<JobTemplateEntity> listJobTemplates(String metalake) {
checkMetalake(NameIdentifierUtil.ofMetalake(metalake), entityStore);
Namespace jobTemplateNs = NamespaceUtil.ofJobTemplate(metalake);
return TreeLockUtils.doWithTreeLock(
NameIdentifier.of(jobTemplateNs.levels()),
LockType.READ,
() -> {
try {
return entityStore.list(
jobTemplateNs, JobTemplateEntity.class, Entity.EntityType.JOB_TEMPLATE);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
});
}
@Override
public void registerJobTemplate(String metalake, JobTemplateEntity jobTemplateEntity)
throws JobTemplateAlreadyExistsException {
checkMetalake(NameIdentifierUtil.ofMetalake(metalake), entityStore);
NameIdentifier jobTemplateIdent =
NameIdentifierUtil.ofJobTemplate(metalake, jobTemplateEntity.name());
TreeLockUtils.doWithTreeLock(
jobTemplateIdent,
LockType.WRITE,
() -> {
try {
entityStore.put(jobTemplateEntity, false /* overwrite */);
return null;
} catch (EntityAlreadyExistsException e) {
throw new JobTemplateAlreadyExistsException(
"Job template with name %s under metalake %s already exists",
jobTemplateEntity.name(), metalake);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
});
}
@Override
public JobTemplateEntity getJobTemplate(String metalake, String jobTemplateName)
throws NoSuchJobTemplateException {
checkMetalake(NameIdentifierUtil.ofMetalake(metalake), entityStore);
NameIdentifier jobTemplateIdent = NameIdentifierUtil.ofJobTemplate(metalake, jobTemplateName);
return TreeLockUtils.doWithTreeLock(
jobTemplateIdent,
LockType.READ,
() -> {
try {
return entityStore.get(
jobTemplateIdent, Entity.EntityType.JOB_TEMPLATE, JobTemplateEntity.class);
} catch (NoSuchEntityException e) {
throw new NoSuchJobTemplateException(
"Job template with name %s under metalake %s does not exist",
jobTemplateName, metalake);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
});
}
  /**
   * Deletes a job template together with its jobs and staging directories.
   *
   * @param metalake the metalake name
   * @param jobTemplateName the template to delete
   * @return true if the template was deleted, false if it did not exist
   * @throws InUseException if any job of this template has not reached a terminal status
   */
  @Override
  public boolean deleteJobTemplate(String metalake, String jobTemplateName) throws InUseException {
    checkMetalake(NameIdentifierUtil.ofMetalake(metalake), entityStore);

    List<JobEntity> jobs;
    try {
      jobs = listJobs(metalake, Optional.of(jobTemplateName));
    } catch (NoSuchJobTemplateException e) {
      // If the job template does not exist, we can safely return false.
      return false;
    }

    // A job counts as active unless it is CANCELLED, SUCCEEDED, or FAILED.
    // NOTE(review): this check happens outside the write lock taken below, so a job
    // started concurrently could slip through — confirm whether that race is acceptable.
    boolean hasActiveJobs =
        jobs.stream()
            .anyMatch(
                job ->
                    job.status() != JobHandle.Status.CANCELLED
                        && job.status() != JobHandle.Status.SUCCEEDED
                        && job.status() != JobHandle.Status.FAILED);
    if (hasActiveJobs) {
      throw new InUseException(
          "Job template %s under metalake %s has active jobs associated with it",
          jobTemplateName, metalake);
    }

    // Delete all the job staging directories associated with the job template.
    // NOTE(review): the directories are removed before the entity delete below; if that
    // delete fails, staging data is already gone — confirm this ordering is intended.
    String jobTemplateStagingPath =
        stagingDir.getAbsolutePath() + File.separator + metalake + File.separator + jobTemplateName;
    File jobTemplateStagingDir = new File(jobTemplateStagingPath);
    if (jobTemplateStagingDir.exists()) {
      try {
        FileUtils.deleteDirectory(jobTemplateStagingDir);
      } catch (IOException e) {
        LOG.error("Failed to delete job template staging directory: {}", jobTemplateStagingPath, e);
      }
    }

    // Delete the job template entity as well as all the jobs associated with it.
    return TreeLockUtils.doWithTreeLock(
        NameIdentifier.of(NamespaceUtil.ofJobTemplate(metalake).levels()),
        LockType.WRITE,
        () -> {
          try {
            return entityStore.delete(
                NameIdentifierUtil.ofJobTemplate(metalake, jobTemplateName),
                Entity.EntityType.JOB_TEMPLATE);
          } catch (IOException ioe) {
            throw new RuntimeException(ioe);
          }
        });
  }
  /**
   * Applies a sequence of changes (rename, comment update, content update) to a job template.
   *
   * @param metalake the metalake name
   * @param jobTemplateName the current template name
   * @param changes the changes to apply, in order
   * @return the updated job template entity
   * @throws NoSuchJobTemplateException if the template does not exist (or was updated
   *     concurrently and the versioned update lost the race)
   * @throws IllegalArgumentException if a change type is not supported
   */
  @Override
  public JobTemplateEntity alterJobTemplate(
      String metalake, String jobTemplateName, JobTemplateChange... changes)
      throws NoSuchJobTemplateException, IllegalArgumentException {
    checkMetalake(NameIdentifierUtil.ofMetalake(metalake), entityStore);

    // Remember the last rename target (if any) purely for the error message below.
    Optional<String> newName =
        Arrays.stream(changes)
            .filter(c -> c instanceof JobTemplateChange.RenameJobTemplate)
            .map(c -> ((JobTemplateChange.RenameJobTemplate) c).getNewName())
            .reduce((first, second) -> second);

    NameIdentifier jobTemplateIdent = NameIdentifierUtil.ofJobTemplate(metalake, jobTemplateName);
    return TreeLockUtils.doWithTreeLock(
        jobTemplateIdent,
        LockType.READ, // Use READ lock because the update method in JobTemplateMetaService will
        // handle the update transactionally and update with a new version number. So we don't
        // have to use a WRITE lock here.
        () -> {
          try {
            return entityStore.update(
                jobTemplateIdent,
                JobTemplateEntity.class,
                Entity.EntityType.JOB_TEMPLATE,
                jobTemplateEntity ->
                    updateJobTemplateEntity(jobTemplateIdent, jobTemplateEntity, changes));
          } catch (NoSuchEntityException e) {
            throw new NoSuchJobTemplateException(
                "Job template with name %s under metalake %s does not exist, this could be due to"
                    + " the job template not existing or updated concurrently. For the latter case"
                    + " please retry the operation.",
                jobTemplateName, metalake);
          } catch (IOException ioe) {
            throw new RuntimeException(ioe);
          } catch (EntityAlreadyExistsException e) {
            // If the EntityAlreadyExistsException is thrown, it means the new name already exists.
            // So there should be a rename change, and the new name should be present.
            throw new RuntimeException(
                String.format(
                    "Failed to rename job template from %s to %s under metalake %s, the new name "
                        + "already exists",
                    jobTemplateName, newName, metalake),
                e);
          }
        });
  }
  /**
   * Lists jobs under a metalake, optionally restricted to those spawned from one job template.
   *
   * @param metalake the metalake name
   * @param jobTemplateName if present, only jobs of this template are returned
   * @return the matching job entities
   * @throws NoSuchJobTemplateException if {@code jobTemplateName} is present but does not exist
   */
  @Override
  public List<JobEntity> listJobs(String metalake, Optional<String> jobTemplateName)
      throws NoSuchJobTemplateException {
    checkMetalake(NameIdentifierUtil.ofMetalake(metalake), entityStore);

    Namespace jobNs = NamespaceUtil.ofJob(metalake);
    return TreeLockUtils.doWithTreeLock(
        NameIdentifier.of(jobNs.levels()),
        LockType.READ,
        () -> {
          try {
            // If jobTemplateName is present, check if the job template exists, will throw an
            // exception if the job template does not exist.
            jobTemplateName.ifPresent(s -> getJobTemplate(metalake, s));

            List<JobEntity> jobEntities;
            if (jobTemplateName.isPresent()) {
              NameIdentifier jobTemplateIdent =
                  NameIdentifierUtil.ofJobTemplate(metalake, jobTemplateName.get());
              // If jobTemplateName is present, we need to list the jobs associated with the job.
              // Using a mock namespace from job template identifier to get the jobs associated
              // with job template.
              String[] elements =
                  ArrayUtils.add(jobTemplateIdent.namespace().levels(), jobTemplateIdent.name());
              Namespace jobTemplateIdentNs = Namespace.of(elements);

              // Lock the job template to ensure no concurrent modifications/deletions
              // (nested inside the namespace-level read lock taken above).
              jobEntities =
                  TreeLockUtils.doWithTreeLock(
                      jobTemplateIdent,
                      LockType.READ,
                      () ->
                          // List all the jobs associated with the job template
                          entityStore.list(
                              jobTemplateIdentNs, JobEntity.class, Entity.EntityType.JOB));
            } else {
              jobEntities = entityStore.list(jobNs, JobEntity.class, Entity.EntityType.JOB);
            }

            return jobEntities;
          } catch (IOException ioe) {
            throw new RuntimeException(ioe);
          }
        });
  }
@Override
public JobEntity getJob(String metalake, String jobId) throws NoSuchJobException {
checkMetalake(NameIdentifierUtil.ofMetalake(metalake), entityStore);
NameIdentifier jobIdent = NameIdentifierUtil.ofJob(metalake, jobId);
return TreeLockUtils.doWithTreeLock(
jobIdent,
LockType.READ,
() -> {
try {
return entityStore.get(jobIdent, Entity.EntityType.JOB, JobEntity.class);
} catch (NoSuchEntityException e) {
throw new NoSuchJobException(
"Job with ID %s under metalake %s does not exist", jobId, metalake);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
});
}
@Override
public JobEntity runJob(String metalake, String jobTemplateName, Map<String, String> jobConf)
throws NoSuchJobTemplateException {
checkMetalake(NameIdentifierUtil.ofMetalake(metalake), entityStore);
// Check if the job template exists, will throw NoSuchJobTemplateException if it does not exist.
JobTemplateEntity jobTemplateEntity = getJobTemplate(metalake, jobTemplateName);
// Create staging directory.
long jobId = idGenerator.nextId();
String jobStagingPath =
stagingDir.getAbsolutePath()
+ String.format(JOB_STAGING_DIR, metalake, jobTemplateName, jobId);
File jobStagingDir = new File(jobStagingPath);
if (!jobStagingDir.mkdirs()) {
throw new RuntimeException(
String.format("Failed to create staging directory %s for job %s", jobStagingDir, jobId));
}
// Create a JobTemplate by replacing the template parameters with the jobConf values, and
// also downloading any necessary files from the URIs specified in the job template.
JobTemplate jobTemplate = createRuntimeJobTemplate(jobTemplateEntity, jobConf, jobStagingDir);
// Submit the job template to the job executor
String jobExecutionId;
try {
jobExecutionId = jobExecutor.submitJob(jobTemplate);
} catch (Exception e) {
throw new RuntimeException(
String.format("Failed to submit job template %s for execution", jobTemplate), e);
}
// Create a new JobEntity to represent the job
JobEntity jobEntity =
JobEntity.builder()
.withId(jobId)
.withJobExecutionId(jobExecutionId)
.withJobTemplateName(jobTemplateName)
.withStatus(JobHandle.Status.QUEUED)
.withNamespace(NamespaceUtil.ofJob(metalake))
.withAuditInfo(
AuditInfo.builder()
.withCreator(PrincipalUtils.getCurrentPrincipal().getName())
.withCreateTime(Instant.now())
.build())
.build();
try {
entityStore.put(jobEntity, false /* overwrite */);
} catch (IOException e) {
throw new RuntimeException("Failed to register the job entity " + jobEntity, e);
}
return jobEntity;
}
  /**
   * Requests cancellation of a running job.
   *
   * <p>The method is a no-op for jobs already in a terminal or cancelling state. Otherwise it
   * asks the external job executor to cancel the job and records the CANCELLING status; the
   * background status poller later observes the final CANCELLED state.
   *
   * @param metalake the metalake name
   * @param jobId the job to cancel
   * @return the job entity (possibly updated to CANCELLING)
   * @throws NoSuchJobException if the job does not exist
   */
  @Override
  public JobEntity cancelJob(String metalake, String jobId) throws NoSuchJobException {
    checkMetalake(NameIdentifierUtil.ofMetalake(metalake), entityStore);

    // Retrieve the job entity, will throw NoSuchJobException if the job does not exist.
    JobEntity jobEntity = getJob(metalake, jobId);
    if (jobEntity.status() == JobHandle.Status.CANCELLING
        || jobEntity.status() == JobHandle.Status.CANCELLED
        || jobEntity.status() == JobHandle.Status.SUCCEEDED
        || jobEntity.status() == JobHandle.Status.FAILED) {
      // If the job is already cancelling, cancelled, succeeded, or failed, we do not need to cancel
      // it again.
      return jobEntity;
    }

    // Cancel the job using the job executor
    try {
      jobExecutor.cancelJob(jobEntity.jobExecutionId());
    } catch (Exception e) {
      throw new RuntimeException(
          String.format("Failed to cancel job with ID %s under metalake %s", jobId, metalake), e);
    }

    // Update the job status to CANCELING
    JobEntity newJobEntity =
        JobEntity.builder()
            .withId(jobEntity.id())
            .withJobExecutionId(jobEntity.jobExecutionId())
            .withJobTemplateName(jobEntity.jobTemplateName())
            .withStatus(JobHandle.Status.CANCELLING)
            .withNamespace(jobEntity.namespace())
            .withAuditInfo(
                AuditInfo.builder()
                    .withCreator(jobEntity.auditInfo().creator())
                    .withCreateTime(jobEntity.auditInfo().createTime())
                    .withLastModifier(PrincipalUtils.getCurrentPrincipal().getName())
                    .withLastModifiedTime(Instant.now())
                    .build())
            .build();

    return TreeLockUtils.doWithTreeLock(
        NameIdentifierUtil.ofJob(metalake, jobId),
        LockType.WRITE,
        () -> {
          try {
            // Update the job entity in the entity store
            entityStore.put(newJobEntity, true /* overwrite */);
            return newJobEntity;
          } catch (IOException e) {
            throw new RuntimeException(
                String.format("Failed to update job entity %s to CANCELING status", newJobEntity),
                e);
          }
        });
  }
@Override
public void close() throws IOException {
jobExecutor.close();
statusPullExecutor.shutdownNow();
cleanUpExecutor.shutdownNow();
}
  /**
   * Background task: for every in-use metalake, polls the external job executor for the status
   * of each non-terminal (QUEUED/STARTED/CANCELLING) job and persists any status change.
   *
   * <p>Jobs that have disappeared from the external executor are marked CANCELLED (if they were
   * CANCELLING) or FAILED; polling errors for a single job are logged and that job keeps its
   * previous status until the next cycle.
   */
  @VisibleForTesting
  void pullAndUpdateJobStatus() {
    List<String> metalakes = listInUseMetalakes(entityStore);
    for (String metalake : metalakes) {
      // This unnecessary list all the jobs, we need to improve the code to only list the active
      // jobs.
      List<JobEntity> activeJobs =
          listJobs(metalake, Optional.empty()).stream()
              .filter(
                  job ->
                      job.status() == JobHandle.Status.QUEUED
                          || job.status() == JobHandle.Status.STARTED
                          || job.status() == JobHandle.Status.CANCELLING)
              .toList();

      activeJobs.forEach(
          job -> {
            // Default to the current status so a polling failure leaves the job unchanged.
            JobHandle.Status newStatus = job.status();
            try {
              newStatus = jobExecutor.getJobStatus(job.jobExecutionId());
            } catch (NoSuchJobException e) {
              // If the job is not found in the external job executor, we assume the job is
              // FAILED if it is not in CANCELLING status, otherwise we assume it is CANCELLED.
              if (job.status() == JobHandle.Status.CANCELLING) {
                newStatus = JobHandle.Status.CANCELLED;
              } else {
                newStatus = JobHandle.Status.FAILED;
              }

              LOG.warn(
                  "Job {} with execution id {} under metalake {} is not found in the "
                      + "external job executor, marking it as {}. This could be due to the job "
                      + "being deleted by the external job executor. Please check the external job "
                      + "executor to know more details.",
                  job.name(),
                  job.jobExecutionId(),
                  metalake,
                  newStatus);
            } catch (Exception e) {
              LOG.error(
                  "Failed to get job status for job {} by execution id {}",
                  job.name(),
                  job.jobExecutionId(),
                  e);
            }

            if (newStatus != job.status()) {
              JobEntity newJobEntity =
                  JobEntity.builder()
                      .withId(job.id())
                      .withJobExecutionId(job.jobExecutionId())
                      .withJobTemplateName(job.jobTemplateName())
                      .withStatus(newStatus)
                      .withNamespace(job.namespace())
                      .withAuditInfo(
                          AuditInfo.builder()
                              .withCreator(job.auditInfo().creator())
                              .withCreateTime(job.auditInfo().createTime())
                              .withLastModifier(PrincipalUtils.getCurrentPrincipal().getName())
                              .withLastModifiedTime(Instant.now())
                              .build())
                      .build();

              // Update the job entity with new status.
              // Effectively-final copy required for capture in the lambda below.
              JobHandle.Status finalNewStatus = newStatus;
              TreeLockUtils.doWithTreeLock(
                  NameIdentifierUtil.ofJob(metalake, job.name()),
                  LockType.WRITE,
                  () -> {
                    try {
                      entityStore.put(newJobEntity, true /* overwrite */);
                      return null;
                    } catch (IOException e) {
                      throw new RuntimeException(
                          String.format(
                              "Failed to update job entity %s to status %s",
                              newJobEntity, finalNewStatus),
                          e);
                    }
                  });

              LOG.info(
                  "Updated the job {} with execution id {} status to {}",
                  job.name(),
                  job.jobExecutionId(),
                  newStatus);
            }
          });
    }
  }
@VisibleForTesting
void cleanUpStagingDirs() {
List<String> metalakes = listInUseMetalakes(entityStore);
for (String metalake : metalakes) {
List<JobEntity> finishedJobs =
listJobs(metalake, Optional.empty()).stream()
.filter(
job ->
job.status() == JobHandle.Status.CANCELLED
|| job.status() == JobHandle.Status.SUCCEEDED
|| job.status() == JobHandle.Status.FAILED)
.filter(
job ->
job.finishedAt() > 0
&& job.finishedAt() + jobStagingDirKeepTimeInMs
< System.currentTimeMillis())
.toList();
finishedJobs.forEach(
job -> {
try {
entityStore.delete(
NameIdentifierUtil.ofJob(metalake, job.name()), Entity.EntityType.JOB);
String jobStagingPath =
stagingDir.getAbsolutePath()
+ String.format(JOB_STAGING_DIR, metalake, job.jobTemplateName(), job.id());
File jobStagingDir = new File(jobStagingPath);
if (jobStagingDir.exists()) {
FileUtils.deleteDirectory(jobStagingDir);
LOG.info("Deleted job staging directory {} for job {}", jobStagingPath, job.name());
}
} catch (IOException e) {
LOG.error("Failed to delete job and staging directory for job {}", job.name(), e);
}
});
}
}
@VisibleForTesting
public static JobTemplate createRuntimeJobTemplate(
JobTemplateEntity jobTemplateEntity, Map<String, String> jobConf, File stagingDir) {
String name = jobTemplateEntity.name();
String comment = jobTemplateEntity.comment();
JobTemplateEntity.TemplateContent content = jobTemplateEntity.templateContent();
String executable = fetchFileFromUri(content.executable(), stagingDir, TIMEOUT_IN_MS);
List<String> args =
content.arguments().stream()
.map(arg -> replacePlaceholder(arg, jobConf))
.collect(Collectors.toList());
Map<String, String> environments =
content.environments().entrySet().stream()
.collect(
Collectors.toMap(
entry -> replacePlaceholder(entry.getKey(), jobConf),
entry -> replacePlaceholder(entry.getValue(), jobConf)));
Map<String, String> customFields =
content.customFields().entrySet().stream()
.collect(
Collectors.toMap(
entry -> replacePlaceholder(entry.getKey(), jobConf),
entry -> replacePlaceholder(entry.getValue(), jobConf)));
// For shell job template
if (content.jobType() == JobTemplate.JobType.SHELL) {
List<String> scripts = fetchFilesFromUri(content.scripts(), stagingDir, TIMEOUT_IN_MS);
return ShellJobTemplate.builder()
.withName(name)
.withComment(comment)
.withExecutable(executable)
.withArguments(args)
.withEnvironments(environments)
.withCustomFields(customFields)
.withScripts(scripts)
.build();
}
// For Spark job template
if (content.jobType() == JobTemplate.JobType.SPARK) {
String className = content.className();
List<String> jars = fetchFilesFromUri(content.jars(), stagingDir, TIMEOUT_IN_MS);
List<String> files = fetchFilesFromUri(content.files(), stagingDir, TIMEOUT_IN_MS);
List<String> archives = fetchFilesFromUri(content.archives(), stagingDir, TIMEOUT_IN_MS);
Map<String, String> configs =
content.configs().entrySet().stream()
.collect(
Collectors.toMap(
entry -> replacePlaceholder(entry.getKey(), jobConf),
entry -> replacePlaceholder(entry.getValue(), jobConf)));
return SparkJobTemplate.builder()
.withName(name)
.withComment(comment)
.withExecutable(executable)
.withArguments(args)
.withEnvironments(environments)
.withCustomFields(customFields)
.withClassName(className)
.withJars(jars)
.withFiles(files)
.withArchives(archives)
.withConfigs(configs)
.build();
}
throw new IllegalArgumentException("Unsupported job type: " + content.jobType());
}
@VisibleForTesting
static String replacePlaceholder(String inputString, Map<String, String> replacements) {
if (StringUtils.isBlank(inputString)) {
return inputString; // Return as is if the input string is blank
}
StringBuilder result = new StringBuilder();
Matcher matcher = PLACEHOLDER_PATTERN.matcher(inputString);
while (matcher.find()) {
String key = matcher.group(1);
String replacement = replacements.get(key);
if (replacement != null) {
matcher.appendReplacement(result, replacement);
} else {
// If no replacement is found, keep the placeholder as is
matcher.appendReplacement(result, matcher.group(0));
}
}
matcher.appendTail(result);
return result.toString();
}
@VisibleForTesting
static List<String> fetchFilesFromUri(List<String> uris, File stagingDir, int timeoutInMs) {
return uris.stream()
.map(uri -> fetchFileFromUri(uri, stagingDir, timeoutInMs))
.collect(Collectors.toList());
}
  /**
   * Makes the file referenced by {@code uri} available inside the staging directory and
   * returns its absolute local path.
   *
   * <p>http/https/ftp URIs are downloaded; URIs without a scheme default to {@code file} and
   * are linked rather than copied.
   *
   * <p>NOTE(review): the destination name is the URI's basename, so two URIs sharing a
   * basename would collide in the staging directory — confirm callers never pass such inputs.
   * For the {@code file} scheme, {@link Files#createSymbolicLink} throws if the destination
   * already exists.
   *
   * @param uri source location of the file
   * @param stagingDir directory the file is staged into
   * @param timeoutInMs connect/read timeout for remote downloads
   * @return absolute path of the staged file
   * @throws RuntimeException wrapping any failure (bad URI, unsupported scheme, I/O error)
   */
  @VisibleForTesting
  static String fetchFileFromUri(String uri, File stagingDir, int timeoutInMs) {
    try {
      URI fileUri = new URI(uri);
      // A URI with no scheme (e.g. "/path/to/file") is treated as a local file.
      String scheme = Optional.ofNullable(fileUri.getScheme()).orElse("file");
      File destFile = new File(stagingDir, new File(fileUri.getPath()).getName());

      switch (scheme) {
        case "http":
        case "https":
        case "ftp":
          FileUtils.copyURLToFile(fileUri.toURL(), destFile, timeoutInMs, timeoutInMs);
          break;

        case "file":
          // Link instead of copy to avoid duplicating potentially large local files.
          Files.createSymbolicLink(destFile.toPath(), new File(fileUri.getPath()).toPath());
          break;

        default:
          throw new IllegalArgumentException("Unsupported scheme: " + scheme);
      }

      return destFile.getAbsolutePath();
    } catch (Exception e) {
      throw new RuntimeException(String.format("Failed to fetch file from URI %s", uri), e);
    }
  }
  /**
   * Returns the names of all metalakes currently flagged as in-use.
   *
   * <p>NOTE(review): {@code propertiesMetadata().getOrDefault(properties, PROPERTY_IN_USE)}
   * is presumed to read the in-use property with its metadata-defined default when unset —
   * confirm against {@code PropertiesMetadata}.
   *
   * @param entityStore the store to list metalakes from
   * @return names of in-use metalakes
   */
  private static List<String> listInUseMetalakes(EntityStore entityStore) {
    try {
      List<BaseMetalake> metalakes =
          TreeLockUtils.doWithRootTreeLock(
              LockType.READ,
              () ->
                  entityStore.list(
                      Namespace.empty(), BaseMetalake.class, Entity.EntityType.METALAKE));
      return metalakes.stream()
          .filter(
              m -> (boolean) m.propertiesMetadata().getOrDefault(m.properties(), PROPERTY_IN_USE))
          .map(BaseMetalake::name)
          .collect(Collectors.toList());
    } catch (IOException e) {
      throw new RuntimeException("Failed to list in-use metalakes", e);
    }
  }
  /**
   * Produces a new {@link JobTemplateEntity} with the given changes applied on top of the
   * existing one. Used as the transformer for the entity store's transactional update.
   *
   * <p>The content builder is first seeded with every field of the current template, then
   * changes are applied in order; for content updates, absent (null) fields in the update keep
   * their current values via {@link #updatedValue}.
   *
   * @param jobTemplateIdent identifier of the template being updated (supplies the namespace)
   * @param jobTemplateEntity the current stored entity
   * @param changes the changes to apply, in order
   * @return the updated entity (new audit info records the modifier and time)
   * @throws IllegalArgumentException if a change or template-update type is unsupported, or a
   *     typed update is applied to a template of a different job type
   */
  @VisibleForTesting
  JobTemplateEntity updateJobTemplateEntity(
      NameIdentifier jobTemplateIdent,
      JobTemplateEntity jobTemplateEntity,
      JobTemplateChange... changes) {
    String newName = jobTemplateEntity.name();
    String newComment = jobTemplateEntity.comment();
    JobTemplateEntity.Builder newTemplateBuilder = JobTemplateEntity.builder();

    // Seed the content builder with all current values; changes below overwrite selectively.
    JobTemplateEntity.TemplateContent.TemplateContentBuilder newTemplateContentBuilder =
        JobTemplateEntity.TemplateContent.builder()
            .withJobType(jobTemplateEntity.templateContent().jobType())
            .withExecutable(jobTemplateEntity.templateContent().executable())
            .withArguments(jobTemplateEntity.templateContent().arguments())
            .withEnvironments(jobTemplateEntity.templateContent().environments())
            .withCustomFields(jobTemplateEntity.templateContent().customFields())
            .withScripts(jobTemplateEntity.templateContent().scripts())
            .withClassName(jobTemplateEntity.templateContent().className())
            .withJars(jobTemplateEntity.templateContent().jars())
            .withFiles(jobTemplateEntity.templateContent().files())
            .withArchives(jobTemplateEntity.templateContent().archives())
            .withConfigs(jobTemplateEntity.templateContent().configs());

    for (JobTemplateChange change : changes) {
      if (change instanceof JobTemplateChange.RenameJobTemplate) {
        newName = ((JobTemplateChange.RenameJobTemplate) change).getNewName();

      } else if (change instanceof JobTemplateChange.UpdateJobTemplateComment) {
        newComment = ((JobTemplateChange.UpdateJobTemplateComment) change).getNewComment();

      } else if (change instanceof JobTemplateChange.UpdateJobTemplate) {
        JobTemplateEntity.TemplateContent oldTemplateContent = jobTemplateEntity.templateContent();
        JobTemplateChange.TemplateUpdate templateUpdate =
            ((JobTemplateChange.UpdateJobTemplate) change).getTemplateUpdate();

        // Common fields: a null in the update means "keep the current value".
        newTemplateContentBuilder
            .withJobType(oldTemplateContent.jobType())
            .withExecutable(
                updatedValue(
                    oldTemplateContent.executable(),
                    Optional.ofNullable(templateUpdate.getNewExecutable())))
            .withArguments(
                updatedValue(
                    oldTemplateContent.arguments(),
                    Optional.ofNullable(templateUpdate.getNewArguments())))
            .withEnvironments(
                updatedValue(
                    oldTemplateContent.environments(),
                    Optional.ofNullable(templateUpdate.getNewEnvironments())))
            .withCustomFields(
                updatedValue(
                    oldTemplateContent.customFields(),
                    Optional.ofNullable(templateUpdate.getNewCustomFields())));

        if (templateUpdate instanceof JobTemplateChange.ShellTemplateUpdate) {
          // A shell update may only be applied to a shell template.
          Preconditions.checkArgument(
              jobTemplateEntity.templateContent().jobType() == JobTemplate.JobType.SHELL,
              "Job template %s is not a shell job template, cannot update to shell template",
              jobTemplateIdent.name());

          JobTemplateChange.ShellTemplateUpdate shellUpdate =
              (JobTemplateChange.ShellTemplateUpdate) templateUpdate;
          newTemplateContentBuilder.withScripts(
              updatedValue(
                  oldTemplateContent.scripts(), Optional.ofNullable(shellUpdate.getNewScripts())));

        } else if (templateUpdate instanceof JobTemplateChange.SparkTemplateUpdate) {
          // A spark update may only be applied to a spark template.
          Preconditions.checkArgument(
              jobTemplateEntity.templateContent().jobType() == JobTemplate.JobType.SPARK,
              "Job template %s is not a spark job template, cannot update to spark template",
              jobTemplateIdent.name());

          JobTemplateChange.SparkTemplateUpdate sparkUpdate =
              (JobTemplateChange.SparkTemplateUpdate) templateUpdate;
          newTemplateContentBuilder
              .withClassName(
                  updatedValue(
                      oldTemplateContent.className(),
                      Optional.ofNullable(sparkUpdate.getNewClassName())))
              .withJars(
                  updatedValue(
                      oldTemplateContent.jars(), Optional.ofNullable(sparkUpdate.getNewJars())))
              .withFiles(
                  updatedValue(
                      oldTemplateContent.files(), Optional.ofNullable(sparkUpdate.getNewFiles())))
              .withArchives(
                  updatedValue(
                      oldTemplateContent.archives(),
                      Optional.ofNullable(sparkUpdate.getNewArchives())))
              .withConfigs(
                  updatedValue(
                      oldTemplateContent.configs(),
                      Optional.ofNullable(sparkUpdate.getNewConfigs())));

        } else {
          throw new IllegalArgumentException("Unsupported template update: " + templateUpdate);
        }
      } else {
        throw new IllegalArgumentException("Unsupported job template change: " + change);
      }
    }

    return newTemplateBuilder
        .withId(jobTemplateEntity.id())
        .withName(newName)
        .withComment(newComment)
        .withNamespace(jobTemplateIdent.namespace())
        .withTemplateContent(newTemplateContentBuilder.build())
        .withAuditInfo(
            AuditInfo.builder()
                .withCreator(jobTemplateEntity.auditInfo().creator())
                .withCreateTime(jobTemplateEntity.auditInfo().createTime())
                .withLastModifier(PrincipalUtils.getCurrentPrincipal().getName())
                .withLastModifiedTime(Instant.now())
                .build())
        .build();
  }
private <T> T updatedValue(T currentValue, Optional<T> newValue) {
return newValue.orElse(currentValue);
}
}
|
apache/oozie | 38,058 | core/src/main/java/org/apache/oozie/CoordinatorActionBean.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oozie;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.Timestamp;
import java.text.MessageFormat;
import java.util.Date;
import java.util.List;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Lob;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import javax.persistence.Transient;
import org.apache.hadoop.io.Writable;
import org.apache.oozie.client.CoordinatorAction;
import org.apache.oozie.client.rest.JsonBean;
import org.apache.oozie.client.rest.JsonTags;
import org.apache.oozie.client.rest.JsonUtils;
import org.apache.oozie.coord.input.dependency.CoordInputDependency;
import org.apache.oozie.coord.input.dependency.CoordInputDependencyFactory;
import org.apache.oozie.util.DateUtils;
import org.apache.oozie.util.WritableUtils;
import org.apache.openjpa.persistence.jdbc.Index;
import org.apache.openjpa.persistence.jdbc.Strategy;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
@Entity
@NamedQueries({
@NamedQuery(name = "UPDATE_COORD_ACTION", query = "update CoordinatorActionBean w set w.actionNumber = :actionNumber,"
+ " w.actionXml = :actionXml, w.consoleUrl = :consoleUrl, w.createdConf = :createdConf, w.errorCode = :errorCode,"
+ " w.errorMessage = :errorMessage, w.externalStatus = :externalStatus, w.missingDependencies "
+ "= :missingDependencies, w.runConf = :runConf, w.timeOut = :timeOut, w.trackerUri = :trackerUri, w.type "
+ "= :type, w.createdTimestamp = :createdTime, w.externalId = :externalId, w.jobId = :jobId,"
+ " w.lastModifiedTimestamp = :lastModifiedTime, w.nominalTimestamp = :nominalTime, w.slaXml = :slaXml,"
+ " w.statusStr = :status where w.id = :id"),
@NamedQuery(name = "UPDATE_COORD_ACTION_MIN", query = "update CoordinatorActionBean w set w.actionXml = :actionXml,"
+ " w.missingDependencies = :missingDependencies, w.lastModifiedTimestamp = :lastModifiedTime, w.statusStr "
+ "= :status where w.id = :id"),
// Query to update the action status, pending status and last modified time stamp of a Coordinator action
@NamedQuery(name = "UPDATE_COORD_ACTION_STATUS_PENDING_TIME", query = "update CoordinatorActionBean w set w.statusStr"
+ " =:status, w.pending =:pending, w.lastModifiedTimestamp = :lastModifiedTime where w.id = :id"),
// Update query for InputCheck
@NamedQuery(name = "UPDATE_COORD_ACTION_FOR_INPUTCHECK", query = "update CoordinatorActionBean w set w.statusStr "
+ "= :status, w.lastModifiedTimestamp = :lastModifiedTime, w.actionXml = :actionXml, w.missingDependencies "
+ "= :missingDependencies where w.id = :id"),
// Update query for Push-based missing dependency check
@NamedQuery(name = "UPDATE_COORD_ACTION_FOR_PUSH_INPUTCHECK", query = "update CoordinatorActionBean w set w.statusStr "
+ "= :status, w.lastModifiedTimestamp = :lastModifiedTime, w.actionXml = :actionXml, w.pushMissingDependencies "
+ "= :pushMissingDependencies where w.id = :id"),
// Update query for Push-based missing dependency check
@NamedQuery(name = "UPDATE_COORD_ACTION_DEPENDENCIES", query = "update CoordinatorActionBean w set w.missingDependencies"
+ " = :missingDependencies, w.pushMissingDependencies = :pushMissingDependencies where w.id = :id"),
// Update query for Start
@NamedQuery(name = "UPDATE_COORD_ACTION_FOR_START", query = "update CoordinatorActionBean w set w.statusStr =:status,"
+ " w.lastModifiedTimestamp = :lastModifiedTime, w.runConf = :runConf, w.externalId = :externalId, w.pending "
+ "= :pending, w.errorCode = :errorCode, w.errorMessage = :errorMessage where w.id = :id"),
@NamedQuery(name = "UPDATE_COORD_ACTION_FOR_MODIFIED_DATE", query = "update CoordinatorActionBean w set"
+ " w.lastModifiedTimestamp = :lastModifiedTime where w.id = :id"),
@NamedQuery(name = "UPDATE_COORD_ACTION_RERUN", query = "update CoordinatorActionBean w set w.actionXml =:actionXml,"
+ " w.statusStr = :status, w.externalId = :externalId, w.externalStatus = :externalStatus, w.rerunTimestamp "
+ "= :rerunTime, w.lastModifiedTimestamp = :lastModifiedTime, w.createdTimestamp = :createdTime, w.createdConf "
+ "= :createdConf, w.runConf = :runConf, w.missingDependencies = :missingDependencies, w.pushMissingDependencies "
+ "= :pushMissingDependencies, w.errorCode = :errorCode, w.errorMessage = :errorMessage where w.id = :id"),
@NamedQuery(name = "DELETE_COMPLETED_ACTIONS_FOR_COORDINATOR", query = "delete from CoordinatorActionBean a where a.jobId"
+ " = :jobId and (a.statusStr = 'SUCCEEDED' OR a.statusStr = 'FAILED' OR a.statusStr= 'KILLED')"),
@NamedQuery(name = "DELETE_ACTIONS_FOR_LONG_RUNNING_COORDINATOR", query = "delete from CoordinatorActionBean a where a.id"
+ " IN (:actionId)"),
@NamedQuery(name = "DELETE_UNSCHEDULED_ACTION", query = "delete from CoordinatorActionBean a where a.id = :id "
+ "and (a.statusStr = 'WAITING' OR a.statusStr = 'READY')"),
@NamedQuery(name = "GET_COORD_ACTIONS_FOR_COORDINATOR", query = "select a.id from CoordinatorActionBean a where a.jobId"
+ " = :jobId"),
// Query used by XTestcase to setup tables
@NamedQuery(name = "GET_COORD_ACTIONS", query = "select OBJECT(w) from CoordinatorActionBean w"),
// Select query used only by test cases
@NamedQuery(name = "GET_COORD_ACTION", query = "select OBJECT(a) from CoordinatorActionBean a where a.id = :id"),
// Select query used by SLAService on restart
@NamedQuery(name = "GET_COORD_ACTION_FOR_SLA", query = "select a.id, a.jobId, a.statusStr, a.externalId,"
+ " a.lastModifiedTimestamp from CoordinatorActionBean a where a.id = :id"),
// Select query used by ActionInfo command
@NamedQuery(name = "GET_COORD_ACTION_FOR_INFO", query = "select a.id, a.jobId, a.actionNumber, a.consoleUrl, a.errorCode,"
+ " a.errorMessage, a.externalId, a.externalStatus, a.trackerUri, a.createdTimestamp, a.nominalTimestamp,"
+ " a.statusStr, a.lastModifiedTimestamp, a.missingDependencies, a.pushMissingDependencies "
+ "from CoordinatorActionBean a where a.id = :id"),
// Select Query used by Timeout and skip commands
@NamedQuery(name = "GET_COORD_ACTION_FOR_TIMEOUT", query = "select a.id, a.jobId, a.statusStr, a.runConf, a.pending,"
+ " a.nominalTimestamp, a.createdTimestamp from CoordinatorActionBean a where a.id = :id"),
// Select query used by InputCheck command
@NamedQuery(name = "GET_COORD_ACTION_FOR_INPUTCHECK", query = "select a.id, a.actionNumber, a.jobId, a.statusStr,"
+ " a.runConf, a.nominalTimestamp, a.createdTimestamp, a.actionXml, a.missingDependencies,"
+ " a.pushMissingDependencies, a.timeOut, a.externalId from CoordinatorActionBean a where a.id = :id"),
// Select query used by CoordActionUpdate command
@NamedQuery(name = "GET_COORD_ACTION_FOR_EXTERNALID", query = "select a.id, a.jobId, a.statusStr, a.pending,"
+ " a.externalId, a.lastModifiedTimestamp, a.slaXml, a.nominalTimestamp, a.createdTimestamp "
+ "from CoordinatorActionBean a where a.externalId = :externalId"),
// Select query used by Check command
@NamedQuery(name = "GET_COORD_ACTION_FOR_CHECK", query = "select a.id, a.jobId, a.statusStr, a.pending, a.externalId,"
+ " a.lastModifiedTimestamp, a.slaXml, a.nominalTimestamp, a.createdTimestamp from CoordinatorActionBean a"
+ " where a.id = :id"),
// Select query used by Start command
@NamedQuery(name = "GET_COORD_ACTION_FOR_START", query = "select a.id, a.jobId, a.statusStr, a.pending, a.createdConf,"
+ " a.slaXml, a.actionXml, a.externalId, a.errorMessage, a.errorCode, a.nominalTimestamp, a.createdTimestamp "
+ "from CoordinatorActionBean a where a.id = :id"),
@NamedQuery(name = "GET_COORD_ACTIONS_FOR_JOB_FIFO", query = "select a.id, a.actionNumber, a.jobId, a.statusStr,"
+ " a.pending, a.nominalTimestamp, a.createdTimestamp from CoordinatorActionBean a where a.jobId = :jobId "
+ "AND a.statusStr = 'READY' order by a.nominalTimestamp"),
@NamedQuery(name = "GET_COORD_ACTIONS_FOR_JOB_LIFO", query = "select a.id, a.actionNumber, a.jobId, a.statusStr,"
+ " a.pending, a.nominalTimestamp, a.createdTimestamp from CoordinatorActionBean a where a.jobId = :jobId "
+ "AND a.statusStr = 'READY' order by a.nominalTimestamp desc"),
@NamedQuery(name = "GET_COORD_RUNNING_ACTIONS_COUNT", query = "select count(a) from CoordinatorActionBean a where a.jobId"
+ " = :jobId AND (a.statusStr = 'RUNNING' OR a.statusStr='SUBMITTED')"),
@NamedQuery(name = "GET_COORD_ACTIONS_COUNT_BY_JOBID", query = "select count(a) from CoordinatorActionBean a "
+ "where a.jobId = :jobId"),
@NamedQuery(name = "GET_COORD_ACTIVE_ACTIONS_COUNT_BY_JOBID", query = "select count(a) from CoordinatorActionBean a"
+ " where a.jobId = :jobId AND a.statusStr = 'WAITING'"),
@NamedQuery(name = "GET_COORD_ACTIONS_PENDING_FALSE_COUNT", query = "select count(a) from CoordinatorActionBean a "
+ "where a.jobId = :jobId AND a.pending = 0 AND (a.statusStr = 'SUSPENDED' OR a.statusStr = 'TIMEDOUT' "
+ "OR a.statusStr = 'SUCCEEDED' OR a.statusStr = 'KILLED' OR a.statusStr = 'FAILED')"),
@NamedQuery(name = "GET_COORD_ACTIONS_PENDING_FALSE_STATUS_COUNT", query = "select count(a) from CoordinatorActionBean a"
+ " where a.jobId = :jobId AND a.pending = 0 AND a.statusStr = :status"),
@NamedQuery(name = "GET_ACTIONS_FOR_COORD_JOB", query = "select count(a) from CoordinatorActionBean a where a.jobId"
+ " = :jobId"),
// Query to retrieve Coordinator actions sorted by nominal time
@NamedQuery(name = "GET_ACTIONS_FOR_COORD_JOB_ORDER_BY_NOMINAL_TIME", query = "select a.id, a.actionNumber, a.consoleUrl,"
+ " a.errorCode, a.errorMessage, a.externalId, a.externalStatus, a.jobId, a.trackerUri, a.createdTimestamp,"
+ " a.nominalTimestamp, a.statusStr, a.lastModifiedTimestamp, a.missingDependencies, a.pushMissingDependencies,"
+ " a.timeOut from CoordinatorActionBean a where a.jobId = :jobId order by a.nominalTimestamp"),
// Query to maintain backward compatibility for coord job info command
@NamedQuery(name = "GET_ALL_COLS_FOR_ACTIONS_FOR_COORD_JOB_ORDER_BY_NOMINAL_TIME", query = "select OBJECT(a) "
+ "from CoordinatorActionBean a where a.jobId = :jobId order by a.nominalTimestamp"),
// Query to retrieve action id, action status, pending status and external Id of not completed Coordinator actions
@NamedQuery(name = "GET_COORD_ACTIONS_NOT_COMPLETED", query = "select a.id, a.statusStr, a.pending, a.externalId,"
+ " a.pushMissingDependencies, a.nominalTimestamp, a.createdTimestamp, a.jobId from CoordinatorActionBean a"
+ " where a.jobId = :jobId AND a.statusStr <> 'FAILED' AND a.statusStr <> 'TIMEDOUT' AND a.statusStr "
+ "<> 'SUCCEEDED' AND a.statusStr <> 'KILLED' AND a.statusStr <> 'IGNORED'"),
// Query to retrieve action id, action status, pending status and external Id of running Coordinator actions
@NamedQuery(name = "GET_COORD_ACTIONS_RUNNING", query = "select a.id, a.statusStr, a.pending, a.externalId,"
+ " a.nominalTimestamp, a.createdTimestamp from CoordinatorActionBean a where a.jobId = :jobId "
+ "and a.statusStr = 'RUNNING'"),
// Query to retrieve action id, action status, pending status and external Id of suspended Coordinator actions
@NamedQuery(name = "GET_COORD_ACTIONS_SUSPENDED", query = "select a.id, a.statusStr, a.pending, a.externalId,"
+ " a.nominalTimestamp, a.createdTimestamp from CoordinatorActionBean a where a.jobId = :jobId "
+ "and a.statusStr = 'SUSPENDED'"),
// Query to retrieve count of Coordinator actions which are pending
@NamedQuery(name = "GET_COORD_ACTIONS_PENDING_COUNT", query = "select count(a) from CoordinatorActionBean a where a.jobId"
+ " = :jobId AND a.pending > 0"),
// Query to retrieve status of Coordinator actions
@NamedQuery(name = "GET_COORD_ACTIONS_STATUS_UNIGNORED", query = "select a.statusStr, a.pending "
+ "from CoordinatorActionBean a where a.jobId = :jobId AND a.statusStr <> 'IGNORED'"),
// Query to retrieve status of Coordinator actions
@NamedQuery(name = "GET_COORD_ACTION_STATUS", query = "select a.statusStr from CoordinatorActionBean a where a.id = :id"),
@NamedQuery(name = "GET_COORD_ACTION_FOR_COORD_JOB_BY_ACTION_NUMBER", query = "select a.id from CoordinatorActionBean a"
+ " where a.jobId = :jobId AND a.actionNumber = :actionNumber"),
@NamedQuery(name = "GET_COORD_ACTIONS_BY_LAST_MODIFIED_TIME", query = "select a.jobId from CoordinatorActionBean a "
+ "where a.lastModifiedTimestamp >= :lastModifiedTime"),
//Used by coordinator store only
@NamedQuery(name = "GET_RUNNING_ACTIONS_FOR_COORD_JOB", query = "select OBJECT(a) from CoordinatorActionBean a "
+ "where a.jobId = :jobId AND a.statusStr = 'RUNNING'"),
@NamedQuery(name = "GET_RUNNING_ACTIONS_OLDER_THAN", query = "select a.id from CoordinatorActionBean a where a.statusStr"
+ " = 'RUNNING' AND a.lastModifiedTimestamp <= :lastModifiedTime"),
@NamedQuery(name = "GET_COORD_ACTIONS_WAITING_READY_SUBMITTED_OLDER_THAN", query = "select a.id, a.jobId, a.statusStr,"
+ " a.externalId, a.pushMissingDependencies from CoordinatorActionBean a where (a.statusStr = 'WAITING' "
+ "OR a.statusStr = 'SUBMITTED' OR a.statusStr = 'READY') AND a.lastModifiedTimestamp <= :lastModifiedTime "
+ "and a.nominalTimestamp <= :currentTime and a.jobId in ( select w.id from CoordinatorJobBean w"
+ " where w.statusStr = 'RUNNING' or w.statusStr = 'RUNNINGWITHERROR')"),
@NamedQuery(name = "GET_COORD_ACTIONS_FOR_RECOVERY_OLDER_THAN", query = "select a.id, a.jobId, a.statusStr, a.externalId,"
+ " a.pending from CoordinatorActionBean a where a.pending > 0 AND (a.statusStr = 'SUSPENDED' OR a.statusStr "
+ "= 'KILLED' OR a.statusStr = 'RUNNING') AND a.lastModifiedTimestamp <= :lastModifiedTime"),
// Select query used by rerun, requires almost all columns so select * is used
@NamedQuery(name = "GET_TERMINATED_ACTIONS_FOR_DATES", query = "select OBJECT(a) from CoordinatorActionBean a "
+ "where a.jobId = :jobId AND (a.statusStr = 'TIMEDOUT' OR a.statusStr = 'SUCCEEDED' OR a.statusStr = 'KILLED' "
+ "OR a.statusStr = 'FAILED' OR a.statusStr = 'IGNORED') AND a.nominalTimestamp >= :startTime "
+ "AND a.nominalTimestamp <= :endTime"),
// Select query used by log
@NamedQuery(name = "GET_TERMINATED_ACTION_IDS_FOR_DATES", query = "select a.id from CoordinatorActionBean a where a.jobId"
+ " = :jobId AND (a.statusStr = 'TIMEDOUT' OR a.statusStr = 'SUCCEEDED' OR a.statusStr = 'KILLED' OR a.statusStr"
+ " = 'FAILED') AND a.nominalTimestamp >= :startTime AND a.nominalTimestamp <= :endTime"),
// Select query used by rerun, requires almost all columns so select * is used
@NamedQuery(name = "GET_ACTION_FOR_NOMINALTIME", query = "select OBJECT(a) from CoordinatorActionBean a where a.jobId "
+ "= :jobId AND a.nominalTimestamp = :nominalTime"),
@NamedQuery(name = "GET_ACTIVE_ACTIONS_FOR_DATES", query = "select a.id, a.jobId, a.statusStr, a.externalId, a.pending,"
+ " a.nominalTimestamp, a.createdTimestamp from CoordinatorActionBean a where a.jobId = :jobId AND (a.statusStr "
+ "= 'WAITING' OR a.statusStr = 'READY' OR a.statusStr = 'SUBMITTED' OR a.statusStr = 'RUNNING' OR a.statusStr "
+ "= 'SUSPENDED') AND a.nominalTimestamp >= :startTime AND a.nominalTimestamp <= :endTime"),
@NamedQuery(name = "GET_COORD_ACTIONS_COUNT", query = "select count(w) from CoordinatorActionBean w"),
@NamedQuery(name = "GET_COORD_ACTIONS_COUNT_RUNNING_FOR_RANGE", query = "select count(w) from CoordinatorActionBean w"
+ " where w.statusStr = 'RUNNING' and w.jobId= :jobId and w.id >= :startAction AND w.id <= :endAction"),
@NamedQuery(name = "GET_COORD_ACTIONS_MAX_MODIFIED_DATE_FOR_RANGE", query = "select max(w.lastModifiedTimestamp) "
+ "from CoordinatorActionBean w where w.jobId= :jobId and w.id >= :startAction AND w.id <= :endAction"),
@NamedQuery(name = "GET_ACTIVE_ACTIONS_IDS_FOR_SLA_CHANGE", query = "select a.id, a.nominalTimestamp,"
+ " a.createdTimestamp, a.actionXml from CoordinatorActionBean a where a.id in (:ids)"
+ " and (a.statusStr <> 'FAILED' AND a.statusStr <> 'KILLED' AND a.statusStr <> 'SUCCEEDED' AND a.statusStr "
+ "<> 'TIMEDOUT' AND a.statusStr <> 'IGNORED')"),
@NamedQuery(name = "GET_ACTIVE_ACTIONS_JOBID_FOR_SLA_CHANGE", query = "select a.id, a.nominalTimestamp,"
+ " a.createdTimestamp, a.actionXml from CoordinatorActionBean a where a.jobId = :jobId and (a.statusStr "
+ "<> 'FAILED' AND a.statusStr <> 'KILLED' AND a.statusStr <> 'SUCCEEDED' AND a.statusStr <> 'TIMEDOUT' "
+ "AND a.statusStr <> 'IGNORED')")
})
@Table(name = "COORD_ACTIONS")
public class CoordinatorActionBean implements
Writable,CoordinatorAction,JsonBean {
@Id
private String id;
@Basic
@Index
@Column(name = "job_id")
private String jobId;
@Basic
@Index
@Column(name = "status")
private String statusStr = CoordinatorAction.Status.WAITING.toString();
@Basic
@Index
@Column(name = "nominal_time")
private java.sql.Timestamp nominalTimestamp = null;
@Basic
@Index
@Column(name = "last_modified_time")
private java.sql.Timestamp lastModifiedTimestamp = null;
@Basic
@Index
@Column(name = "created_time")
private java.sql.Timestamp createdTimestamp = null;
@Basic
@Index
@Column(name = "rerun_time")
private java.sql.Timestamp rerunTimestamp = null;
@Basic
@Index
@Column(name = "external_id")
private String externalId;
@Basic
@Column(name = "sla_xml")
@Lob
@Strategy("org.apache.oozie.executor.jpa.StringBlobValueHandler")
private StringBlob slaXml = null;
@Basic
@Column(name = "pending")
private int pending = 0;
@Basic
@Column(name = "job_type")
private String type;
@Basic
@Column(name = "action_number")
private int actionNumber;
@Basic
@Column(name = "created_conf")
@Lob
@Strategy("org.apache.oozie.executor.jpa.StringBlobValueHandler")
private StringBlob createdConf;
@Basic
@Column(name = "time_out")
private int timeOut = 0;
@Basic
@Column(name = "run_conf")
@Lob
@Strategy("org.apache.oozie.executor.jpa.StringBlobValueHandler")
private StringBlob runConf;
@Basic
@Column(name = "action_xml")
@Lob
@Strategy("org.apache.oozie.executor.jpa.StringBlobValueHandler")
private StringBlob actionXml;
@Basic
@Column(name = "missing_dependencies")
@Lob
@Strategy("org.apache.oozie.executor.jpa.StringBlobValueHandler")
private StringBlob missingDependencies;
@Basic
@Column(name = "push_missing_dependencies")
@Lob
@Strategy("org.apache.oozie.executor.jpa.StringBlobValueHandler")
private StringBlob pushMissingDependencies;
@Basic
@Column(name = "external_status")
private String externalStatus;
@Basic
@Column(name = "tracker_uri")
private String trackerUri;
@Basic
@Column(name = "console_url")
private String consoleUrl;
@Basic
@Column(name = "error_code")
private String errorCode;
@Basic
@Column(name = "error_message")
private String errorMessage;
@SuppressWarnings("unchecked")
public JSONObject toJSONObject() {
return toJSONObject("GMT");
}
@Transient
private CoordInputDependency coordPushInputDependency;
@Transient
private CoordInputDependency coordPullInputDependency;
public CoordinatorActionBean() {
}
/**
* Serialize the coordinator bean to a data output.
*
* @param dataOutput data output.
* @throws IOException thrown if the coordinator bean could not be
* serialized.
*/
@Override
public void write(DataOutput dataOutput) throws IOException {
WritableUtils.writeStr(dataOutput, getJobId());
WritableUtils.writeStr(dataOutput, getType());
WritableUtils.writeStr(dataOutput, getId());
WritableUtils.writeStr(dataOutput, getCreatedConf());
WritableUtils.writeStr(dataOutput, getStatus().toString());
dataOutput.writeInt(getActionNumber());
WritableUtils.writeStr(dataOutput, getRunConf());
WritableUtils.writeStr(dataOutput, getExternalStatus());
WritableUtils.writeStr(dataOutput, getTrackerUri());
WritableUtils.writeStr(dataOutput, getConsoleUrl());
WritableUtils.writeStr(dataOutput, getErrorCode());
WritableUtils.writeStr(dataOutput, getErrorMessage());
dataOutput.writeLong((getCreatedTime() != null) ? getCreatedTime().getTime() : -1);
dataOutput.writeLong((getLastModifiedTime() != null) ? getLastModifiedTime().getTime() : -1);
}
/**
* Deserialize a coordinator bean from a data input.
*
* @param dataInput data input.
* @throws IOException thrown if the workflow bean could not be
* deserialized.
*/
@Override
public void readFields(DataInput dataInput) throws IOException {
setJobId(WritableUtils.readStr(dataInput));
setType(WritableUtils.readStr(dataInput));
setId(WritableUtils.readStr(dataInput));
setCreatedConf(WritableUtils.readStr(dataInput));
setStatus(CoordinatorAction.Status.valueOf(WritableUtils.readStr(dataInput)));
setActionNumber(dataInput.readInt());
setRunConf(WritableUtils.readStr(dataInput));
setExternalStatus(WritableUtils.readStr(dataInput));
setTrackerUri(WritableUtils.readStr(dataInput));
setConsoleUrl(WritableUtils.readStr(dataInput));
setErrorCode(WritableUtils.readStr(dataInput));
setErrorMessage(WritableUtils.readStr(dataInput));
long d = dataInput.readLong();
if (d != -1) {
setCreatedTime(new Date(d));
}
d = dataInput.readLong();
if (d != -1) {
setLastModifiedTime(new Date(d));
}
}
@Override
public String getJobId() {
return this.jobId;
}
public void setJobId(String id) {
this.jobId = id;
}
@Override
public Status getStatus() {
return Status.valueOf(statusStr);
}
/**
* Return the status in string
* @return statusStr
*/
public String getStatusStr() {
return statusStr;
}
public void setStatus(Status status) {
this.statusStr = status.toString();
}
public void setStatusStr(String statusStr) {
this.statusStr = statusStr;
}
public void setCreatedTime(Date createdTime) {
this.createdTimestamp = DateUtils.convertDateToTimestamp(createdTime);
}
public void setRerunTime(Date rerunTime) {
this.rerunTimestamp = DateUtils.convertDateToTimestamp(rerunTime);
}
public void setNominalTime(Date nominalTime) {
this.nominalTimestamp = DateUtils.convertDateToTimestamp(nominalTime);
}
public void setLastModifiedTime(Date lastModifiedTime) {
this.lastModifiedTimestamp = DateUtils.convertDateToTimestamp(lastModifiedTime);
}
public Date getCreatedTime() {
return DateUtils.toDate(createdTimestamp);
}
public Timestamp getCreatedTimestamp() {
return createdTimestamp;
}
public Date getRerunTime() {
return DateUtils.toDate(rerunTimestamp);
}
public Timestamp getRerunTimestamp() {
return rerunTimestamp;
}
@Override
public Date getLastModifiedTime() {
return DateUtils.toDate(lastModifiedTimestamp);
}
public Timestamp getLastModifiedTimestamp() {
return lastModifiedTimestamp;
}
@Override
public Date getNominalTime() {
return DateUtils.toDate(nominalTimestamp);
}
public Timestamp getNominalTimestamp() {
return nominalTimestamp;
}
@Override
public String getExternalId() {
return externalId;
}
public void setExternalId(String externalId) {
this.externalId = externalId;
}
public StringBlob getSlaXmlBlob() {
return slaXml;
}
public void setSlaXmlBlob(StringBlob slaXml) {
this.slaXml = slaXml;
}
public String getSlaXml() {
return slaXml == null ? null : slaXml.getString();
}
public void setSlaXml(String slaXml) {
if (this.slaXml == null) {
this.slaXml = new StringBlob(slaXml);
}
else {
this.slaXml.setString(slaXml);
}
}
/**
* @return true if in terminal status
*/
public boolean isTerminalStatus() {
boolean isTerminal = true;
switch (getStatus()) {
case WAITING:
case READY:
case SUBMITTED:
case RUNNING:
case SUSPENDED:
isTerminal = false;
break;
default:
isTerminal = true;
break;
}
return isTerminal;
}
/**
* Return if the action is complete with failure.
*
* @return if the action is complete with failure.
*/
public boolean isTerminalWithFailure() {
boolean result = false;
switch (getStatus()) {
case FAILED:
case KILLED:
case TIMEDOUT:
result = true;
}
return result;
}
/**
* Set some actions are in progress for particular coordinator action.
*
* @param pending set pending to true
*/
public void setPending(int pending) {
this.pending = pending;
}
/**
* increment pending and return it
*
* @return pending
*/
public int incrementAndGetPending() {
this.pending++;
return pending;
}
/**
* decrement pending and return it
*
* @return pending
*/
public int decrementAndGetPending() {
this.pending = Math.max(this.pending - 1, 0);
return pending;
}
/**
* Get some actions are in progress for particular bundle action.
*
* @return pending
*/
public int getPending() {
return this.pending;
}
/**
* Return if the action is pending.
*
* @return if the action is pending.
*/
public boolean isPending() {
return pending > 0 ? true : false;
}
@Override
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public void setActionNumber(int actionNumber) {
this.actionNumber = actionNumber;
}
@Override
public int getActionNumber() {
return actionNumber;
}
@Override
public String getCreatedConf() {
return createdConf == null ? null : createdConf.getString();
}
public void setCreatedConf(String createdConf) {
if (this.createdConf == null) {
this.createdConf = new StringBlob(createdConf);
}
else {
this.createdConf.setString(createdConf);
}
}
public void setCreatedConfBlob(StringBlob createdConf) {
this.createdConf = createdConf;
}
public StringBlob getCreatedConfBlob() {
return createdConf;
}
public void setRunConf(String runConf) {
if (this.runConf == null) {
this.runConf = new StringBlob(runConf);
}
else {
this.runConf.setString(runConf);
}
}
@Override
public String getRunConf() {
return runConf == null ? null : runConf.getString();
}
public void setRunConfBlob(StringBlob runConf) {
this.runConf = runConf;
}
public StringBlob getRunConfBlob() {
return runConf;
}
public void setMissingDependencies(String missingDependencies) {
if (this.missingDependencies == null) {
this.missingDependencies = new StringBlob(missingDependencies);
}
else {
this.missingDependencies.setString(missingDependencies);
}
}
@Override
public String getMissingDependencies() {
return missingDependencies == null ? null : missingDependencies.getString();
}
public void setMissingDependenciesBlob(StringBlob missingDependencies) {
this.missingDependencies = missingDependencies;
}
public StringBlob getMissingDependenciesBlob() {
return missingDependencies;
}
@Override
public String getPushMissingDependencies() {
return pushMissingDependencies == null ? null : pushMissingDependencies.getString();
}
public void setPushMissingDependencies(String pushMissingDependencies) {
if (this.pushMissingDependencies == null) {
this.pushMissingDependencies = new StringBlob(pushMissingDependencies);
}
else {
this.pushMissingDependencies.setString(pushMissingDependencies);
}
}
public void setPushMissingDependenciesBlob(StringBlob pushMissingDependencies) {
this.pushMissingDependencies = pushMissingDependencies;
}
public StringBlob getPushMissingDependenciesBlob() {
return pushMissingDependencies;
}
public String getExternalStatus() {
return externalStatus;
}
public void setExternalStatus(String externalStatus) {
this.externalStatus = externalStatus;
}
@Override
public String getTrackerUri() {
return trackerUri;
}
public void setTrackerUri(String trackerUri) {
this.trackerUri = trackerUri;
}
@Override
public String getConsoleUrl() {
return consoleUrl;
}
public void setConsoleUrl(String consoleUrl) {
this.consoleUrl = consoleUrl;
}
@Override
public String getErrorCode() {
return errorCode;
}
@Override
public String getErrorMessage() {
return errorMessage;
}
public void setErrorInfo(String errorCode, String errorMessage) {
this.errorCode = errorCode;
this.errorMessage = errorMessage;
}
public String getActionXml() {
return actionXml == null ? null : actionXml.getString();
}
public void setActionXml(String actionXml) {
if (this.actionXml == null) {
this.actionXml = new StringBlob(actionXml);
}
else {
this.actionXml.setString(actionXml);
}
}
public void setActionXmlBlob(StringBlob actionXml) {
this.actionXml = actionXml;
}
public StringBlob getActionXmlBlob() {
return actionXml;
}
@Override
public String toString() {
return MessageFormat.format("CoordinatorAction name[{0}] status[{1}]",
getId(), getStatus());
}
public int getTimeOut() {
return timeOut;
}
public void setTimeOut(int timeOut) {
this.timeOut = timeOut;
}
public void setErrorCode(String errorCode) {
this.errorCode = errorCode;
}
public void setErrorMessage(String errorMessage) {
this.errorMessage = errorMessage;
}
@SuppressWarnings("unchecked")
public JSONObject toJSONObject(String timeZoneId) {
JSONObject json = new JSONObject();
json.put(JsonTags.COORDINATOR_ACTION_ID, id);
json.put(JsonTags.COORDINATOR_JOB_ID, jobId);
json.put(JsonTags.COORDINATOR_ACTION_TYPE, type);
json.put(JsonTags.COORDINATOR_ACTION_NUMBER, actionNumber);
json.put(JsonTags.COORDINATOR_ACTION_CREATED_CONF, getCreatedConf());
json.put(JsonTags.COORDINATOR_ACTION_CREATED_TIME, JsonUtils.formatDateRfc822(getCreatedTime(), timeZoneId));
json.put(JsonTags.COORDINATOR_ACTION_NOMINAL_TIME, JsonUtils.formatDateRfc822(getNominalTime(), timeZoneId));
json.put(JsonTags.COORDINATOR_ACTION_EXTERNALID, externalId);
// json.put(JsonTags.COORDINATOR_ACTION_START_TIME, JsonUtils
// .formatDateRfc822(startTime), timeZoneId);
json.put(JsonTags.COORDINATOR_ACTION_STATUS, statusStr);
json.put(JsonTags.COORDINATOR_ACTION_RUNTIME_CONF, getRunConf());
json.put(JsonTags.COORDINATOR_ACTION_LAST_MODIFIED_TIME,
JsonUtils.formatDateRfc822(getLastModifiedTime(), timeZoneId));
// json.put(JsonTags.COORDINATOR_ACTION_START_TIME, JsonUtils
// .formatDateRfc822(startTime), timeZoneId);
// json.put(JsonTags.COORDINATOR_ACTION_END_TIME, JsonUtils
// .formatDateRfc822(endTime), timeZoneId);
json.put(JsonTags.COORDINATOR_ACTION_MISSING_DEPS, getPullInputDependencies().getMissingDependencies());
json.put(JsonTags.COORDINATOR_ACTION_PUSH_MISSING_DEPS, getPushInputDependencies().getMissingDependencies());
json.put(JsonTags.COORDINATOR_ACTION_EXTERNAL_STATUS, externalStatus);
json.put(JsonTags.COORDINATOR_ACTION_TRACKER_URI, trackerUri);
json.put(JsonTags.COORDINATOR_ACTION_CONSOLE_URL, consoleUrl);
json.put(JsonTags.COORDINATOR_ACTION_ERROR_CODE, errorCode);
json.put(JsonTags.COORDINATOR_ACTION_ERROR_MESSAGE, errorMessage);
json.put(JsonTags.TO_STRING, toString());
return json;
}
/**
* Convert a nodes list into a JSONArray.
*
* @param actions nodes list.
* @param timeZoneId time zone to use for dates in the JSON array.
* @return the corresponding JSON array.
*/
@SuppressWarnings("unchecked")
public static JSONArray toJSONArray(List<CoordinatorActionBean> actions, String timeZoneId) {
JSONArray array = new JSONArray();
for (CoordinatorActionBean action : actions) {
array.add(action.toJSONObject(timeZoneId));
}
return array;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((id == null) ? 0 : id.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
CoordinatorActionBean other = (CoordinatorActionBean) obj;
if (id == null) {
if (other.id != null) {
return false;
}
}
else if (!id.equals(other.id)) {
return false;
}
return true;
}
public CoordInputDependency getPullInputDependencies() {
if (coordPullInputDependency == null) {
coordPullInputDependency = CoordInputDependencyFactory.getPullInputDependencies(missingDependencies);
}
return coordPullInputDependency;
}
/**
 * Returns the push input dependencies, lazily constructed from the
 * serialized {@code pushMissingDependencies} string on first access.
 * <p>
 * NOTE(review): the lazy initialization is not synchronized — presumably
 * each bean instance is confined to one thread; confirm with callers.
 *
 * @return the push input dependencies, never null.
 */
public CoordInputDependency getPushInputDependencies() {
    if (coordPushInputDependency == null) {
        coordPushInputDependency = CoordInputDependencyFactory.getPushInputDependencies(pushMissingDependencies);
    }
    return coordPushInputDependency;
}
/**
 * Sets the pull (polling) input dependencies, replacing any lazily
 * constructed value.
 *
 * @param coordPullInputDependency the pull input dependencies to set.
 */
public void setPullInputDependencies(CoordInputDependency coordPullInputDependency) {
    this.coordPullInputDependency = coordPullInputDependency;
}
/**
 * Sets the push input dependencies, replacing any lazily constructed value.
 *
 * @param coordPushInputDependency the push input dependencies to set.
 */
public void setPushInputDependencies(CoordInputDependency coordPushInputDependency) {
    this.coordPushInputDependency = coordPushInputDependency;
}
}
|
google/j2objc | 38,339 | jre_emul/android/platform/external/icu/android_icu4j/src/main/java/android/icu/util/Currency.java | /* GENERATED SOURCE. DO NOT MODIFY. */
// © 2016 and later: Unicode, Inc. and others.
// License & terms of use: http://www.unicode.org/copyright.html#License
/**
*******************************************************************************
* Copyright (C) 2001-2016, International Business Machines Corporation and
* others. All Rights Reserved.
*******************************************************************************
*/
package android.icu.util;
import java.io.ObjectStreamException;
import java.lang.ref.SoftReference;
import java.text.ParsePosition;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.Set;
import android.icu.impl.CacheBase;
import android.icu.impl.ICUCache;
import android.icu.impl.ICUData;
import android.icu.impl.ICUDebug;
import android.icu.impl.ICUResourceBundle;
import android.icu.impl.SimpleCache;
import android.icu.impl.SoftCache;
import android.icu.impl.TextTrieMap;
import android.icu.text.CurrencyDisplayNames;
import android.icu.text.CurrencyMetaInfo;
import android.icu.text.CurrencyMetaInfo.CurrencyDigits;
import android.icu.text.CurrencyMetaInfo.CurrencyFilter;
import android.icu.util.ULocale.Category;
/**
* A class encapsulating a currency, as defined by ISO 4217. A
* <tt>Currency</tt> object can be created given a <tt>Locale</tt> or
* given an ISO 4217 code. Once created, the <tt>Currency</tt> object
* can return various data necessary to its proper display:
*
* <ul><li>A display symbol, for a specific locale
* <li>The number of fraction digits to display
* <li>A rounding increment
* </ul>
*
* The <tt>DecimalFormat</tt> class uses these data to display
* currencies.
*
* <p>Note: This class deliberately resembles
* <tt>java.util.Currency</tt> but it has a completely independent
* implementation, and adds features not present in the JDK.
* @author Alan Liu
*/
public class Currency extends MeasureUnit {
private static final long serialVersionUID = -5839973855554750484L;
private static final boolean DEBUG = ICUDebug.enabled("currency");
// Cache to save currency name trie
private static ICUCache<ULocale, List<TextTrieMap<CurrencyStringInfo>>> CURRENCY_NAME_CACHE =
new SimpleCache<ULocale, List<TextTrieMap<CurrencyStringInfo>>>();
/**
* Selector for getName() indicating a symbolic name for a
* currency, such as "$" for USD.
*/
public static final int SYMBOL_NAME = 0;
/**
* Selector for getName() indicating the long name for a
* currency, such as "US Dollar" for USD.
*/
public static final int LONG_NAME = 1;
/**
* Selector for getName() indicating the plural long name for a
* currency, such as "US dollar" for USD in "1 US dollar",
* and "US dollars" for USD in "2 US dollars".
*/
public static final int PLURAL_LONG_NAME = 2;
private static final EquivalenceRelation<String> EQUIVALENT_CURRENCY_SYMBOLS =
new EquivalenceRelation<String>()
.add("\u00a5", "\uffe5")
.add("$", "\ufe69", "\uff04")
.add("\u20a8", "\u20b9")
.add("\u00a3", "\u20a4");
/**
* Currency Usage used for Decimal Format
*/
public enum CurrencyUsage{
/**
* a setting to specify currency usage which determines currency digit and rounding
* for standard usage, for example: "50.00 NT$"
*/
STANDARD,
/**
* a setting to specify currency usage which determines currency digit and rounding
* for cash usage, for example: "50 NT$"
*/
CASH
}
// begin registry stuff
// shim for service code
/* package */ static abstract class ServiceShim {
abstract ULocale[] getAvailableULocales();
abstract Locale[] getAvailableLocales();
abstract Currency createInstance(ULocale l);
abstract Object registerInstance(Currency c, ULocale l);
abstract boolean unregister(Object f);
}
private static ServiceShim shim;
/**
 * Lazily loads the currency service shim via reflection.
 *
 * @return the shared {@code ServiceShim} instance.
 * @throws RuntimeException if the shim class cannot be loaded or
 *     instantiated; the original exception is attached as the cause.
 */
private static ServiceShim getShim() {
    // Note: this instantiation is safe on loose-memory-model configurations
    // despite lack of synchronization, since the shim instance has no state--
    // it's all in the class init. The worst problem is we might instantiate
    // two shim instances, but they'll share the same state so that's ok.
    if (shim == null) {
        try {
            Class<?> cls = Class.forName("android.icu.util.CurrencyServiceShim");
            shim = (ServiceShim)cls.newInstance();
        }
        catch (Exception e) {
            if(DEBUG){
                e.printStackTrace();
            }
            // Fix: preserve the original exception as the cause rather than
            // wrapping only its message, so the real failure (class-not-found,
            // access, instantiation) remains visible in stack traces.
            throw new RuntimeException(e);
        }
    }
    return shim;
}
/**
* Returns a currency object for the default currency in the given
* locale.
* @param locale the locale
* @return the currency object for this locale
*/
public static Currency getInstance(Locale locale) {
return getInstance(ULocale.forLocale(locale));
}
/**
 * Returns a currency object for the default currency in the given
 * locale.
 *
 * @param locale the locale whose currency to look up.
 * @return the currency object for this locale.
 */
public static Currency getInstance(ULocale locale) {
    // An explicit "currency" keyword on the locale wins over region data.
    String currencyCode = locale.getKeywordValue("currency");
    if (currencyCode != null) {
        return getInstance(currencyCode);
    }
    // With no registered service shim, resolve directly from resource data.
    return (shim == null) ? createCurrency(locale) : shim.createInstance(locale);
}
/**
* Returns an array of Strings which contain the currency
* identifiers that are valid for the given locale on the
* given date. If there are no such identifiers, returns null.
* Returned identifiers are in preference order.
* @param loc the locale for which to retrieve currency codes.
* @param d the date for which to retrieve currency codes for the given locale.
* @return The array of ISO currency codes.
*/
public static String[] getAvailableCurrencyCodes(ULocale loc, Date d) {
String region = ULocale.getRegionForSupplementalData(loc, false);
CurrencyFilter filter = CurrencyFilter.onDate(d).withRegion(region);
List<String> list = getTenderCurrencies(filter);
// Note: Prior to 4.4 the spec didn't say that we return null if there are no results, but
// the test assumed it did. Kept the behavior and amended the spec.
if (list.isEmpty()) {
return null;
}
return list.toArray(new String[list.size()]);
}
/**
* Returns an array of Strings which contain the currency
* identifiers that are valid for the given {@link java.util.Locale} on the
* given date. If there are no such identifiers, returns null.
* Returned identifiers are in preference order.
* @param loc the {@link java.util.Locale} for which to retrieve currency codes.
* @param d the date for which to retrieve currency codes for the given locale.
* @return The array of ISO currency codes.
*/
public static String[] getAvailableCurrencyCodes(Locale loc, Date d) {
return getAvailableCurrencyCodes(ULocale.forLocale(loc), d);
}
/**
* Returns the set of available currencies. The returned set of currencies contains all of the
* available currencies, including obsolete ones. The result set can be modified without
* affecting the available currencies in the runtime.
*
* @return The set of available currencies. The returned set could be empty if there is no
* currency data available.
*/
public static Set<Currency> getAvailableCurrencies() {
CurrencyMetaInfo info = CurrencyMetaInfo.getInstance();
List<String> list = info.currencies(CurrencyFilter.all());
HashSet<Currency> resultSet = new HashSet<Currency>(list.size());
for (String code : list) {
resultSet.add(getInstance(code));
}
return resultSet;
}
private static final String EUR_STR = "EUR";
private static final CacheBase<String, Currency, Void> regionCurrencyCache =
new SoftCache<String, Currency, Void>() {
@Override
protected Currency createInstance(String key, Void unused) {
return loadCurrency(key);
}
};
/**
* Instantiate a currency from resource data.
*/
/* package */ static Currency createCurrency(ULocale loc) {
String variant = loc.getVariant();
if ("EURO".equals(variant)) {
return getInstance(EUR_STR);
}
// Cache the currency by region, and whether variant=PREEURO.
// Minimizes the size of the cache compared with caching by ULocale.
String key = ULocale.getRegionForSupplementalData(loc, false);
if ("PREEURO".equals(variant)) {
key = key + '-';
}
return regionCurrencyCache.getInstance(key, null);
}
/**
 * Loads the currency for a region cache key produced by createCurrency():
 * the region code, optionally suffixed with '-' to mark a PREEURO request.
 *
 * @param key region code, with a trailing '-' for pre-euro lookup.
 * @return the region's currency, or null if none is known.
 */
private static Currency loadCurrency(String key) {
    String region;
    boolean isPreEuro;
    if (key.endsWith("-")) {
        region = key.substring(0, key.length() - 1);
        isPreEuro = true;
    } else {
        region = key;
        isPreEuro = false;
    }
    CurrencyMetaInfo info = CurrencyMetaInfo.getInstance();
    List<String> list = info.currencies(CurrencyFilter.onRegion(region));
    if (!list.isEmpty()) {
        String code = list.get(0);
        // For a PREEURO request whose preferred currency is EUR, fall back
        // to the region's second (pre-euro) currency, if any.
        if (isPreEuro && EUR_STR.equals(code)) {
            if (list.size() < 2) {
                return null;
            }
            code = list.get(1);
        }
        return getInstance(code);
    }
    return null;
}
/**
 * Returns a currency object given an ISO 4217 3-letter code.
 * @param theISOCode the iso code
 * @return the currency for this iso code
 * @throws NullPointerException if <code>theISOCode</code> is null.
 * @throws IllegalArgumentException if <code>theISOCode</code> is not a
 * 3-letter alpha code.
 */
public static Currency getInstance(String theISOCode) {
    if (theISOCode == null) {
        throw new NullPointerException("The input currency code is null.");
    }
    if (!isAlpha3Code(theISOCode)) {
        throw new IllegalArgumentException(
                "The input currency code is not 3-letter alphabetic code.");
    }
    // Canonicalize to upper case with Locale.ENGLISH to avoid
    // locale-sensitive case mapping (e.g. Turkish dotless i).
    String canonical = theISOCode.toUpperCase(Locale.ENGLISH);
    return (Currency) MeasureUnit.internalGetInstance("currency", canonical);
}
/**
 * Returns true iff the string is exactly three ASCII letters (either case).
 */
private static boolean isAlpha3Code(String code) {
    if (code.length() != 3) {
        return false;
    }
    for (int i = 0; i < 3; i++) {
        char ch = code.charAt(i);
        boolean upper = ch >= 'A' && ch <= 'Z';
        boolean lower = ch >= 'a' && ch <= 'z';
        if (!upper && !lower) {
            return false;
        }
    }
    return true;
}
/**
* Registers a new currency for the provided locale. The returned object
* is a key that can be used to unregister this currency object.
*
* <p>Because ICU may choose to cache Currency objects internally, this must
* be called at application startup, prior to any calls to
* Currency.getInstance to avoid undefined behavior.
*
* @param currency the currency to register
* @param locale the ulocale under which to register the currency
* @return a registry key that can be used to unregister this currency
* @see #unregister
* @hide unsupported on Android
*/
public static Object registerInstance(Currency currency, ULocale locale) {
return getShim().registerInstance(currency, locale);
}
/**
* Unregister the currency associated with this key (obtained from
* registerInstance).
* @param registryKey the registry key returned from registerInstance
* @see #registerInstance
* @hide unsupported on Android
*/
public static boolean unregister(Object registryKey) {
if (registryKey == null) {
throw new IllegalArgumentException("registryKey must not be null");
}
if (shim == null) {
return false;
}
return shim.unregister(registryKey);
}
/**
* Return an array of the locales for which a currency
* is defined.
* @return an array of the available locales
*/
public static Locale[] getAvailableLocales() {
if (shim == null) {
return ICUResourceBundle.getAvailableLocales();
} else {
return shim.getAvailableLocales();
}
}
/**
* Return an array of the ulocales for which a currency
* is defined.
* @return an array of the available ulocales
*/
public static ULocale[] getAvailableULocales() {
if (shim == null) {
return ICUResourceBundle.getAvailableULocales();
} else {
return shim.getAvailableULocales();
}
}
// end registry stuff
/**
* Given a key and a locale, returns an array of values for the key for which data
* exists. If commonlyUsed is true, these are the values that typically are used
* with this locale, otherwise these are all values for which data exists.
* This is a common service API.
* <p>
* The only supported key is "currency", other values return an empty array.
* <p>
* Currency information is based on the region of the locale. If the locale does not
* indicate a region, {@link ULocale#addLikelySubtags(ULocale)} is used to infer a region,
* except for the 'und' locale.
* <p>
* If commonlyUsed is true, only the currencies known to be in use as of the current date
* are returned. When there are more than one, these are returned in preference order
* (typically, this occurs when a country is transitioning to a new currency, and the
* newer currency is preferred), see
* <a href="http://unicode.org/reports/tr35/#Supplemental_Currency_Data">Unicode TR#35 Sec. C1</a>.
* If commonlyUsed is false, all currencies ever used in any locale are returned, in no
* particular order.
*
* @param key key whose values to look up. the only recognized key is "currency"
* @param locale the locale
* @param commonlyUsed if true, return only values that are currently used in the locale.
* Otherwise returns all values.
* @return an array of values for the given key and the locale. If there is no data, the
* array will be empty.
*/
public static final String[] getKeywordValuesForLocale(String key, ULocale locale,
boolean commonlyUsed) {
// The only keyword we recognize is 'currency'
if (!"currency".equals(key)) {
return EMPTY_STRING_ARRAY;
}
if (!commonlyUsed) {
// Behavior change from 4.3.3, no longer sort the currencies
return getAllTenderCurrencies().toArray(new String[0]);
}
// Don't resolve region if the requested locale is 'und', it will resolve to US
// which we don't want.
if (UND.equals(locale)) {
return EMPTY_STRING_ARRAY;
}
String prefRegion = ULocale.getRegionForSupplementalData(locale, true);
CurrencyFilter filter = CurrencyFilter.now().withRegion(prefRegion);
// currencies are in region's preferred order when we're filtering on region, which
// matches our spec
List<String> result = getTenderCurrencies(filter);
// No fallback anymore (change from 4.3.3)
if (result.size() == 0) {
return EMPTY_STRING_ARRAY;
}
return result.toArray(new String[result.size()]);
}
private static final ULocale UND = new ULocale("und");
private static final String[] EMPTY_STRING_ARRAY = new String[0];
/**
 * Returns the ISO 4217 3-letter code for this currency object.
 * The code is stored as the MeasureUnit subtype.
 */
public String getCurrencyCode() {
    return subType;
}
/**
 * Returns the ISO 4217 numeric code for this currency object.
 * <p>Note: If the ISO 4217 numeric code is not assigned for the currency or
 * the currency is unknown, this method returns 0.</p>
 * @return The ISO 4217 numeric code of this currency.
 */
public int getNumericCode() {
    int result = 0;
    try {
        // Look up the numeric code in the bundled currencyNumericCodes
        // resource; an absent entry falls through and returns 0.
        UResourceBundle bundle = UResourceBundle.getBundleInstance(
                ICUData.ICU_BASE_NAME,
                "currencyNumericCodes",
                ICUResourceBundle.ICU_DATA_CLASS_LOADER);
        UResourceBundle codeMap = bundle.get("codeMap");
        UResourceBundle numCode = codeMap.get(subType);
        result = numCode.getInt();
    } catch (MissingResourceException e) {
        // fall through: unknown currency or unassigned numeric code -> 0
    }
    return result;
}
/**
* Convenience and compatibility override of getName that
* requests the symbol name for the default <code>DISPLAY</code> locale.
* @see #getName
* @see Category#DISPLAY
*/
public String getSymbol() {
return getSymbol(ULocale.getDefault(Category.DISPLAY));
}
/**
* Convenience and compatibility override of getName that
* requests the symbol name.
* @param loc the Locale for the symbol
* @see #getName
*/
public String getSymbol(Locale loc) {
return getSymbol(ULocale.forLocale(loc));
}
/**
* Convenience and compatibility override of getName that
* requests the symbol name.
* @param uloc the ULocale for the symbol
* @see #getName
*/
public String getSymbol(ULocale uloc) {
return getName(uloc, SYMBOL_NAME, new boolean[1]);
}
/**
* Returns the display name for the given currency in the
* given locale.
* This is a convenient method for
* getName(ULocale, int, boolean[]);
*/
public String getName(Locale locale,
int nameStyle,
boolean[] isChoiceFormat) {
return getName(ULocale.forLocale(locale), nameStyle, isChoiceFormat);
}
/**
* Returns the display name for the given currency in the
* given locale. For example, the display name for the USD
* currency object in the en_US locale is "$".
* @param locale locale in which to display currency
* @param nameStyle selector for which kind of name to return.
* The nameStyle should be either SYMBOL_NAME or
* LONG_NAME. Otherwise, throw IllegalArgumentException.
* @param isChoiceFormat fill-in; isChoiceFormat[0] is set to true
* if the returned value is a ChoiceFormat pattern; otherwise it
* is set to false
* @return display string for this currency. If the resource data
* contains no entry for this currency, then the ISO 4217 code is
* returned. If isChoiceFormat[0] is true, then the result is a
* ChoiceFormat pattern. Otherwise it is a static string. <b>Note:</b>
* as of ICU 4.4, choice formats are not used, and the value returned
* in isChoiceFormat is always false.
* <p>
* @throws IllegalArgumentException if the nameStyle is not SYMBOL_NAME
* or LONG_NAME.
* @see #getName(ULocale, int, String, boolean[])
*/
public String getName(ULocale locale, int nameStyle, boolean[] isChoiceFormat) {
if (!(nameStyle == SYMBOL_NAME || nameStyle == LONG_NAME)) {
throw new IllegalArgumentException("bad name style: " + nameStyle);
}
// We no longer support choice format data in names. Data should not contain
// choice patterns.
if (isChoiceFormat != null) {
isChoiceFormat[0] = false;
}
CurrencyDisplayNames names = CurrencyDisplayNames.getInstance(locale);
return nameStyle == SYMBOL_NAME ? names.getSymbol(subType) : names.getName(subType);
}
/**
* Returns the display name for the given currency in the given locale.
* This is a convenience overload of getName(ULocale, int, String, boolean[]);
*/
public String getName(Locale locale, int nameStyle, String pluralCount,
boolean[] isChoiceFormat) {
return getName(ULocale.forLocale(locale), nameStyle, pluralCount, isChoiceFormat);
}
/**
* Returns the display name for the given currency in the
* given locale. For example, the SYMBOL_NAME for the USD
* currency object in the en_US locale is "$".
* The PLURAL_LONG_NAME for the USD currency object when the currency
* amount is plural is "US dollars", such as in "3.00 US dollars";
* while the PLURAL_LONG_NAME for the USD currency object when the currency
* amount is singular is "US dollar", such as in "1.00 US dollar".
* @param locale locale in which to display currency
* @param nameStyle selector for which kind of name to return
* @param pluralCount plural count string for this locale
* @param isChoiceFormat fill-in; isChoiceFormat[0] is set to true
* if the returned value is a ChoiceFormat pattern; otherwise it
* is set to false
* @return display string for this currency. If the resource data
* contains no entry for this currency, then the ISO 4217 code is
* returned. If isChoiceFormat[0] is true, then the result is a
* ChoiceFormat pattern. Otherwise it is a static string. <b>Note:</b>
* as of ICU 4.4, choice formats are not used, and the value returned
* in isChoiceFormat is always false.
* @throws IllegalArgumentException if the nameStyle is not SYMBOL_NAME,
* LONG_NAME, or PLURAL_LONG_NAME.
*/
public String getName(ULocale locale, int nameStyle, String pluralCount,
boolean[] isChoiceFormat) {
if (nameStyle != PLURAL_LONG_NAME) {
return getName(locale, nameStyle, isChoiceFormat);
}
// We no longer support choice format
if (isChoiceFormat != null) {
isChoiceFormat[0] = false;
}
CurrencyDisplayNames names = CurrencyDisplayNames.getInstance(locale);
return names.getPluralName(subType, pluralCount);
}
/**
* Returns the display name for this currency in the default locale.
* If the resource data for the default locale contains no entry for this currency,
* then the ISO 4217 code is returned.
* <p>
* Note: This method is a convenience equivalent for
* {@link java.util.Currency#getDisplayName()} and is equivalent to
* <code>getName(Locale.getDefault(), LONG_NAME, null)</code>.
*
* @return The display name of this currency
* @see #getDisplayName(Locale)
* @see #getName(Locale, int, boolean[])
*/
@SuppressWarnings("javadoc") // java.util.Currency#getDisplayName() is introduced in Java 7
public String getDisplayName() {
return getName(Locale.getDefault(), LONG_NAME, null);
}
/**
* Returns the display name for this currency in the given locale.
* If the resource data for the given locale contains no entry for this currency,
* then the ISO 4217 code is returned.
* <p>
* Note: This method is a convenience equivalent for
* {@link java.util.Currency#getDisplayName(java.util.Locale)} and is equivalent
* to <code>getName(locale, LONG_NAME, null)</code>.
*
* @param locale locale in which to display currency
* @return The display name of this currency for the specified locale
* @see #getDisplayName(Locale)
* @see #getName(Locale, int, boolean[])
*/
@SuppressWarnings("javadoc") // java.util.Currency#getDisplayName() is introduced in Java 7
public String getDisplayName(Locale locale) {
return getName(locale, LONG_NAME, null);
}
/**
 * Attempt to parse the given string as a currency, either as a
 * display name in the given locale, or as a 3-letter ISO 4217
 * code. If multiple display names match, then the longest one is
 * selected. If both a display name and a 3-letter ISO code
 * match, then the display name is preferred, unless it's length
 * is less than 3.
 *
 * @param locale the locale of the display names to match
 * @param text the text to parse
 * @param type parse against currency type: LONG_NAME only or not
 * @param pos input-output position; on input, the position within
 * text to match; must have 0 <= pos.getIndex() < text.length();
 * on output, the position after the last matched character. If
 * the parse fails, the position in unchanged upon output.
 * @return the ISO 4217 code, as a string, of the best match, or
 * null if there is no match
 *
 * @deprecated This API is ICU internal only.
 * @hide original deprecated declaration
 * @hide draft / provisional / internal are hidden on Android
 */
@Deprecated
public static String parse(ULocale locale, String text, int type, ParsePosition pos) {
    // Per-locale pair of tries: index 0 = symbols, index 1 = long names.
    List<TextTrieMap<CurrencyStringInfo>> currencyTrieVec = CURRENCY_NAME_CACHE.get(locale);
    if (currencyTrieVec == null) {
        // NOTE(review): the TextTrieMap boolean flag differs for names (true)
        // vs. symbols (false) — presumably toggles case folding; confirm in
        // TextTrieMap before relying on it.
        TextTrieMap<CurrencyStringInfo> currencyNameTrie =
            new TextTrieMap<CurrencyStringInfo>(true);
        TextTrieMap<CurrencyStringInfo> currencySymbolTrie =
            new TextTrieMap<CurrencyStringInfo>(false);
        currencyTrieVec = new ArrayList<TextTrieMap<CurrencyStringInfo>>();
        currencyTrieVec.add(currencySymbolTrie);
        currencyTrieVec.add(currencyNameTrie);
        setupCurrencyTrieVec(locale, currencyTrieVec);
        CURRENCY_NAME_CACHE.put(locale, currencyTrieVec);
    }
    int maxLength = 0;
    String isoResult = null;
    // look for the names
    TextTrieMap<CurrencyStringInfo> currencyNameTrie = currencyTrieVec.get(1);
    CurrencyNameResultHandler handler = new CurrencyNameResultHandler();
    currencyNameTrie.find(text, pos.getIndex(), handler);
    isoResult = handler.getBestCurrencyISOCode();
    maxLength = handler.getBestMatchLength();
    if (type != Currency.LONG_NAME) { // not long name only
        TextTrieMap<CurrencyStringInfo> currencySymbolTrie = currencyTrieVec.get(0);
        handler = new CurrencyNameResultHandler();
        currencySymbolTrie.find(text, pos.getIndex(), handler);
        // A symbol match only wins if it is strictly longer than the name match.
        if (handler.getBestMatchLength() > maxLength) {
            isoResult = handler.getBestCurrencyISOCode();
            maxLength = handler.getBestMatchLength();
        }
    }
    // Advance the position past the matched span (no-op when maxLength == 0).
    int start = pos.getIndex();
    pos.setIndex(start + maxLength);
    return isoResult;
}
private static void setupCurrencyTrieVec(ULocale locale,
List<TextTrieMap<CurrencyStringInfo>> trieVec) {
TextTrieMap<CurrencyStringInfo> symTrie = trieVec.get(0);
TextTrieMap<CurrencyStringInfo> trie = trieVec.get(1);
CurrencyDisplayNames names = CurrencyDisplayNames.getInstance(locale);
for (Map.Entry<String, String> e : names.symbolMap().entrySet()) {
String symbol = e.getKey();
String isoCode = e.getValue();
// Register under not just symbol, but under every equivalent symbol as well
// e.g short width yen and long width yen.
for (String equivalentSymbol : EQUIVALENT_CURRENCY_SYMBOLS.get(symbol)) {
symTrie.put(equivalentSymbol, new CurrencyStringInfo(isoCode, symbol));
}
}
for (Map.Entry<String, String> e : names.nameMap().entrySet()) {
String name = e.getKey();
String isoCode = e.getValue();
trie.put(name, new CurrencyStringInfo(isoCode, name));
}
}
private static final class CurrencyStringInfo {
private String isoCode;
private String currencyString;
public CurrencyStringInfo(String isoCode, String currencyString) {
this.isoCode = isoCode;
this.currencyString = currencyString;
}
public String getISOCode() {
return isoCode;
}
@SuppressWarnings("unused")
public String getCurrencyString() {
return currencyString;
}
}
private static class CurrencyNameResultHandler
implements TextTrieMap.ResultHandler<CurrencyStringInfo> {
// The length of longest matching key
private int bestMatchLength;
// The currency ISO code of longest matching key
private String bestCurrencyISOCode;
// As the trie is traversed, handlePrefixMatch is called at each node. matchLength is the
// length length of the key at the current node; values is the list of all the values mapped to
// that key. matchLength increases with each call as trie is traversed.
@Override
public boolean handlePrefixMatch(int matchLength, Iterator<CurrencyStringInfo> values) {
if (values.hasNext()) {
// Since the best match criteria is only based on length of key in trie and since all the
// values are mapped to the same key, we only need to examine the first value.
bestCurrencyISOCode = values.next().getISOCode();
bestMatchLength = matchLength;
}
return true;
}
public String getBestCurrencyISOCode() {
return bestCurrencyISOCode;
}
public int getBestMatchLength() {
return bestMatchLength;
}
}
/**
* Returns the number of the number of fraction digits that should
* be displayed for this currency.
* This is equivalent to getDefaultFractionDigits(CurrencyUsage.STANDARD);
* @return a non-negative number of fraction digits to be
* displayed
*/
public int getDefaultFractionDigits() {
return getDefaultFractionDigits(CurrencyUsage.STANDARD);
}
/**
* Returns the number of the number of fraction digits that should
* be displayed for this currency with Usage.
* @param Usage the usage of currency(Standard or Cash)
* @return a non-negative number of fraction digits to be
* displayed
*/
public int getDefaultFractionDigits(CurrencyUsage Usage) {
CurrencyMetaInfo info = CurrencyMetaInfo.getInstance();
CurrencyDigits digits = info.currencyDigits(subType, Usage);
return digits.fractionDigits;
}
/**
* Returns the rounding increment for this currency, or 0.0 if no
* rounding is done by this currency.
* This is equivalent to getRoundingIncrement(CurrencyUsage.STANDARD);
* @return the non-negative rounding increment, or 0.0 if none
*/
public double getRoundingIncrement() {
return getRoundingIncrement(CurrencyUsage.STANDARD);
}
/**
 * Returns the rounding increment for this currency, or 0.0 if no
 * rounding is done by this currency with the Usage.
 * @param Usage the usage of currency(Standard or Cash)
 * @return the non-negative rounding increment, or 0.0 if none
 */
public double getRoundingIncrement(CurrencyUsage Usage) {
    CurrencyMetaInfo info = CurrencyMetaInfo.getInstance();
    CurrencyDigits digits = info.currencyDigits(subType, Usage);
    int increment = digits.roundingIncrement;
    // No rounding data — by far the most common case.
    if (increment == 0) {
        return 0.0;
    }
    int fractionDigits = digits.fractionDigits;
    // Guard against invalid metadata before indexing the power table.
    if (fractionDigits < 0 || fractionDigits >= POW10.length) {
        return 0.0;
    }
    // Scale the integer increment down by 10^fractionDigits. The only
    // actual rounding data, as of this writing, is CHF { 2, 25 }.
    return (double) increment / POW10[fractionDigits];
}
/**
* Returns the ISO 4217 code for this currency.
*/
@Override
public String toString() {
return subType;
}
/**
 * Constructs a currency object for the given ISO 4217 3-letter
 * code. This constructor assumes that the code is valid.
 *
 * @param theISOCode The iso code used to construct the currency.
 */
protected Currency(String theISOCode) {
    super("currency", theISOCode);
    // isoCode is kept for readResolve() and Currency class no longer
    // use it. So this statement actually does not have any effect.
    isoCode = theISOCode;
}
// POW10[i] = 10^i
private static final int[] POW10 = {
1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000
};
private static SoftReference<List<String>> ALL_TENDER_CODES;
private static SoftReference<Set<String>> ALL_CODES_AS_SET;
/*
 * Returns an unmodifiable String list including all known tender currency codes.
 * The list is cached behind a SoftReference and rebuilt on demand after a GC
 * has cleared it; synchronized to keep the cache update single-threaded.
 */
private static synchronized List<String> getAllTenderCurrencies() {
    List<String> all = (ALL_TENDER_CODES == null) ? null : ALL_TENDER_CODES.get();
    if (all == null) {
        // Filter out non-tender currencies which have "from" date set to 9999-12-31
        // CurrencyFilter has "to" value set to 9998-12-31 in order to exclude them
        //CurrencyFilter filter = CurrencyFilter.onDateRange(null, new Date(253373299200000L));
        CurrencyFilter filter = CurrencyFilter.all();
        all = Collections.unmodifiableList(getTenderCurrencies(filter));
        ALL_TENDER_CODES = new SoftReference<List<String>>(all);
    }
    return all;
}
private static synchronized Set<String> getAllCurrenciesAsSet() {
Set<String> all = (ALL_CODES_AS_SET == null) ? null : ALL_CODES_AS_SET.get();
if (all == null) {
CurrencyMetaInfo info = CurrencyMetaInfo.getInstance();
all = Collections.unmodifiableSet(
new HashSet<String>(info.currencies(CurrencyFilter.all())));
ALL_CODES_AS_SET = new SoftReference<Set<String>>(all);
}
return all;
}
/**
 * Queries if the given ISO 4217 3-letter code is available on the specified date range.
 * <p>
 * Note: For checking availability of a currency on a specific date, specify the date on both <code>from</code> and
 * <code>to</code>. When both <code>from</code> and <code>to</code> are null, this method checks if the specified
 * currency is available all time.
 *
 * @param code
 *            The ISO 4217 3-letter code.
 * @param from
 *            The lower bound of the date range, inclusive. When <code>from</code> is null, check the availability
 *            of the currency any date before <code>to</code>
 * @param to
 *            The upper bound of the date range, inclusive. When <code>to</code> is null, check the availability of
 *            the currency any date after <code>from</code>
 * @return true if the given ISO 4217 3-letter code is supported on the specified date range.
 * @throws IllegalArgumentException when <code>to</code> is before <code>from</code>.
 */
public static boolean isAvailable(String code, Date from, Date to) {
    if (!isAlpha3Code(code)) {
        return false;
    }
    if (from != null && to != null && from.after(to)) {
        throw new IllegalArgumentException("To is before from");
    }
    code = code.toUpperCase(Locale.ENGLISH);
    boolean isKnown = getAllCurrenciesAsSet().contains(code);
    // Idiom fix: use !isKnown rather than comparing against the literal false.
    if (!isKnown) {
        return false;
    } else if (from == null && to == null) {
        return true;
    }
    // If caller passed a date range, we cannot rely solely on the cache
    CurrencyMetaInfo info = CurrencyMetaInfo.getInstance();
    List<String> allActive = info.currencies(
            CurrencyFilter.onDateRange(from, to).withCurrency(code));
    return allActive.contains(code);
}
/**
* Returns the list of remaining tender currencies after a filter is applied.
* @param filter the filter to apply to the tender currencies
* @return a list of tender currencies
*/
private static List<String> getTenderCurrencies(CurrencyFilter filter) {
CurrencyMetaInfo info = CurrencyMetaInfo.getInstance();
return info.currencies(filter.withTender());
}
/**
 * Maps each element to the set of elements it is equivalent to (including
 * itself). Groups passed to {@link #add} must be pairwise disjoint.
 */
private static final class EquivalenceRelation<T> {
    // Every member of a group maps to the same shared Set instance.
    private Map<T, Set<T>> data = new HashMap<T, Set<T>>();
    /**
     * Registers the given items as one equivalence group.
     *
     * @param items mutually equivalent items; none may already belong to a
     *     previously added group.
     * @return this, for call chaining.
     * @throws IllegalArgumentException if any item was already added.
     */
    @SuppressWarnings("unchecked") // See ticket #11395, this is safe.
    public EquivalenceRelation<T> add(T... items) {
        Set<T> group = new HashSet<T>();
        for (T item : items) {
            if (data.containsKey(item)) {
                throw new IllegalArgumentException("All groups passed to add must be disjoint.");
            }
            group.add(item);
        }
        for (T item : items) {
            data.put(item, group);
        }
        return this;
    }
    /**
     * Returns the unmodifiable group containing {@code item}, or a
     * singleton set of just the item if it was never added.
     */
    public Set<T> get(T item) {
        Set<T> result = data.get(item);
        if (result == null) {
            return Collections.singleton(item);
        }
        return Collections.unmodifiableSet(result);
    }
}
/**
 * Serialization hook: replaces this instance with a serialization proxy so that the
 * serialized form is decoupled from this class's internal layout.
 * {@code type} and {@code subType} are presumably inherited from the superclass
 * (not declared in this section) — TODO confirm.
 */
private Object writeReplace() throws ObjectStreamException {
    return new MeasureUnitProxy(type, subType);
}
// For backward compatibility only: older serialized forms stored the code directly
// in this field instead of using the serialization proxy.
/**
 * ISO 4217 3-letter code. Retained solely so that instances serialized by older
 * versions of this class can still be deserialized; see {@link #readResolve()}.
 */
private final String isoCode;
/**
 * Deserialization hook for the legacy serialized form: resolves the stream object
 * to the instance returned by {@code Currency.getInstance(isoCode)}.
 */
private Object readResolve() throws ObjectStreamException {
    // The old isoCode field used to determine the currency.
    return Currency.getInstance(isoCode);
}
}
//eof
|
googleapis/google-cloud-java | 38,178 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/ListTestCaseResultsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3/test_case.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3;
/**
*
*
* <pre>
* The response message for
* [TestCases.ListTestCaseResults][google.cloud.dialogflow.cx.v3.TestCases.ListTestCaseResults].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse}
*/
public final class ListTestCaseResultsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse)
ListTestCaseResultsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListTestCaseResultsResponse.newBuilder() to construct.
private ListTestCaseResultsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListTestCaseResultsResponse() {
testCaseResults_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListTestCaseResultsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3_ListTestCaseResultsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3_ListTestCaseResultsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse.class,
com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse.Builder.class);
}
public static final int TEST_CASE_RESULTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.dialogflow.cx.v3.TestCaseResult> testCaseResults_;
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.dialogflow.cx.v3.TestCaseResult> getTestCaseResultsList() {
return testCaseResults_;
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.dialogflow.cx.v3.TestCaseResultOrBuilder>
getTestCaseResultsOrBuilderList() {
return testCaseResults_;
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
@java.lang.Override
public int getTestCaseResultsCount() {
return testCaseResults_.size();
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.TestCaseResult getTestCaseResults(int index) {
return testCaseResults_.get(index);
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.TestCaseResultOrBuilder getTestCaseResultsOrBuilder(
int index) {
return testCaseResults_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Tri-state memo for isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message has no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < testCaseResults_.size(); i++) {
output.writeMessage(1, testCaseResults_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < testCaseResults_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, testCaseResults_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  // Different message types are never equal; defer to the superclass for symmetry.
  if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse other =
      (com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse) obj;
  // Value equality over every declared field, plus any unknown fields from the wire.
  if (!getTestCaseResultsList().equals(other.getTestCaseResultsList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Messages are effectively immutable, so the hash is computed once and cached.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  // Mix in the descriptor so equal field values of different message types hash differently.
  hash = (19 * hash) + getDescriptor().hashCode();
  // Repeated fields only contribute when non-empty, mirroring equals() semantics.
  if (getTestCaseResultsCount() > 0) {
    hash = (37 * hash) + TEST_CASE_RESULTS_FIELD_NUMBER;
    hash = (53 * hash) + getTestCaseResultsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response message for
* [TestCases.ListTestCaseResults][google.cloud.dialogflow.cx.v3.TestCases.ListTestCaseResults].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse)
com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3_ListTestCaseResultsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3_ListTestCaseResultsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse.class,
com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse.Builder.class);
}
// Construct using com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (testCaseResultsBuilder_ == null) {
testCaseResults_ = java.util.Collections.emptyList();
} else {
testCaseResults_ = null;
testCaseResultsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.cx.v3.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3_ListTestCaseResultsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse
getDefaultInstanceForType() {
return com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse build() {
com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse buildPartial() {
com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse result =
new com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse result) {
if (testCaseResultsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
testCaseResults_ = java.util.Collections.unmodifiableList(testCaseResults_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.testCaseResults_ = testCaseResults_;
} else {
result.testCaseResults_ = testCaseResultsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse) {
return mergeFrom((com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse other) {
if (other
== com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse.getDefaultInstance())
return this;
if (testCaseResultsBuilder_ == null) {
if (!other.testCaseResults_.isEmpty()) {
if (testCaseResults_.isEmpty()) {
testCaseResults_ = other.testCaseResults_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTestCaseResultsIsMutable();
testCaseResults_.addAll(other.testCaseResults_);
}
onChanged();
}
} else {
if (!other.testCaseResults_.isEmpty()) {
if (testCaseResultsBuilder_.isEmpty()) {
testCaseResultsBuilder_.dispose();
testCaseResultsBuilder_ = null;
testCaseResults_ = other.testCaseResults_;
bitField0_ = (bitField0_ & ~0x00000001);
testCaseResultsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getTestCaseResultsFieldBuilder()
: null;
} else {
testCaseResultsBuilder_.addAllMessages(other.testCaseResults_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.dialogflow.cx.v3.TestCaseResult m =
input.readMessage(
com.google.cloud.dialogflow.cx.v3.TestCaseResult.parser(),
extensionRegistry);
if (testCaseResultsBuilder_ == null) {
ensureTestCaseResultsIsMutable();
testCaseResults_.add(m);
} else {
testCaseResultsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.dialogflow.cx.v3.TestCaseResult> testCaseResults_ =
java.util.Collections.emptyList();
private void ensureTestCaseResultsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
testCaseResults_ =
new java.util.ArrayList<com.google.cloud.dialogflow.cx.v3.TestCaseResult>(
testCaseResults_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3.TestCaseResult,
com.google.cloud.dialogflow.cx.v3.TestCaseResult.Builder,
com.google.cloud.dialogflow.cx.v3.TestCaseResultOrBuilder>
testCaseResultsBuilder_;
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public java.util.List<com.google.cloud.dialogflow.cx.v3.TestCaseResult>
getTestCaseResultsList() {
if (testCaseResultsBuilder_ == null) {
return java.util.Collections.unmodifiableList(testCaseResults_);
} else {
return testCaseResultsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public int getTestCaseResultsCount() {
if (testCaseResultsBuilder_ == null) {
return testCaseResults_.size();
} else {
return testCaseResultsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public com.google.cloud.dialogflow.cx.v3.TestCaseResult getTestCaseResults(int index) {
if (testCaseResultsBuilder_ == null) {
return testCaseResults_.get(index);
} else {
return testCaseResultsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public Builder setTestCaseResults(
int index, com.google.cloud.dialogflow.cx.v3.TestCaseResult value) {
if (testCaseResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTestCaseResultsIsMutable();
testCaseResults_.set(index, value);
onChanged();
} else {
testCaseResultsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public Builder setTestCaseResults(
int index, com.google.cloud.dialogflow.cx.v3.TestCaseResult.Builder builderForValue) {
if (testCaseResultsBuilder_ == null) {
ensureTestCaseResultsIsMutable();
testCaseResults_.set(index, builderForValue.build());
onChanged();
} else {
testCaseResultsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public Builder addTestCaseResults(com.google.cloud.dialogflow.cx.v3.TestCaseResult value) {
if (testCaseResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTestCaseResultsIsMutable();
testCaseResults_.add(value);
onChanged();
} else {
testCaseResultsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public Builder addTestCaseResults(
int index, com.google.cloud.dialogflow.cx.v3.TestCaseResult value) {
if (testCaseResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTestCaseResultsIsMutable();
testCaseResults_.add(index, value);
onChanged();
} else {
testCaseResultsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public Builder addTestCaseResults(
com.google.cloud.dialogflow.cx.v3.TestCaseResult.Builder builderForValue) {
if (testCaseResultsBuilder_ == null) {
ensureTestCaseResultsIsMutable();
testCaseResults_.add(builderForValue.build());
onChanged();
} else {
testCaseResultsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public Builder addTestCaseResults(
int index, com.google.cloud.dialogflow.cx.v3.TestCaseResult.Builder builderForValue) {
if (testCaseResultsBuilder_ == null) {
ensureTestCaseResultsIsMutable();
testCaseResults_.add(index, builderForValue.build());
onChanged();
} else {
testCaseResultsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public Builder addAllTestCaseResults(
java.lang.Iterable<? extends com.google.cloud.dialogflow.cx.v3.TestCaseResult> values) {
if (testCaseResultsBuilder_ == null) {
ensureTestCaseResultsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, testCaseResults_);
onChanged();
} else {
testCaseResultsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public Builder clearTestCaseResults() {
if (testCaseResultsBuilder_ == null) {
testCaseResults_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
testCaseResultsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public Builder removeTestCaseResults(int index) {
if (testCaseResultsBuilder_ == null) {
ensureTestCaseResultsIsMutable();
testCaseResults_.remove(index);
onChanged();
} else {
testCaseResultsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public com.google.cloud.dialogflow.cx.v3.TestCaseResult.Builder getTestCaseResultsBuilder(
int index) {
return getTestCaseResultsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public com.google.cloud.dialogflow.cx.v3.TestCaseResultOrBuilder getTestCaseResultsOrBuilder(
int index) {
if (testCaseResultsBuilder_ == null) {
return testCaseResults_.get(index);
} else {
return testCaseResultsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public java.util.List<? extends com.google.cloud.dialogflow.cx.v3.TestCaseResultOrBuilder>
getTestCaseResultsOrBuilderList() {
if (testCaseResultsBuilder_ != null) {
return testCaseResultsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(testCaseResults_);
}
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public com.google.cloud.dialogflow.cx.v3.TestCaseResult.Builder addTestCaseResultsBuilder() {
return getTestCaseResultsFieldBuilder()
.addBuilder(com.google.cloud.dialogflow.cx.v3.TestCaseResult.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public com.google.cloud.dialogflow.cx.v3.TestCaseResult.Builder addTestCaseResultsBuilder(
int index) {
return getTestCaseResultsFieldBuilder()
.addBuilder(index, com.google.cloud.dialogflow.cx.v3.TestCaseResult.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of test case results.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.cx.v3.TestCaseResult test_case_results = 1;</code>
*/
public java.util.List<com.google.cloud.dialogflow.cx.v3.TestCaseResult.Builder>
getTestCaseResultsBuilderList() {
return getTestCaseResultsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3.TestCaseResult,
com.google.cloud.dialogflow.cx.v3.TestCaseResult.Builder,
com.google.cloud.dialogflow.cx.v3.TestCaseResultOrBuilder>
getTestCaseResultsFieldBuilder() {
if (testCaseResultsBuilder_ == null) {
testCaseResultsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3.TestCaseResult,
com.google.cloud.dialogflow.cx.v3.TestCaseResult.Builder,
com.google.cloud.dialogflow.cx.v3.TestCaseResultOrBuilder>(
testCaseResults_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
testCaseResults_ = null;
}
return testCaseResultsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse)
private static final com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse();
}
public static com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  private static final com.google.protobuf.Parser<ListTestCaseResultsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListTestCaseResultsResponse>() {
        @java.lang.Override
        public ListTestCaseResultsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect the partial message.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and instance accessors both expose the same shared PARSER singleton.
  public static com.google.protobuf.Parser<ListTestCaseResultsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListTestCaseResultsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3.ListTestCaseResultsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 38,209 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/CompareVersionsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3/version.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3;
/**
*
*
* <pre>
* The request message for
* [Versions.CompareVersions][google.cloud.dialogflow.cx.v3.Versions.CompareVersions].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.CompareVersionsRequest}
*/
public final class CompareVersionsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.CompareVersionsRequest)
CompareVersionsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CompareVersionsRequest.newBuilder() to construct.
private CompareVersionsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CompareVersionsRequest() {
baseVersion_ = "";
targetVersion_ = "";
languageCode_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CompareVersionsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3.VersionProto
.internal_static_google_cloud_dialogflow_cx_v3_CompareVersionsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3.VersionProto
.internal_static_google_cloud_dialogflow_cx_v3_CompareVersionsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest.class,
com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest.Builder.class);
}
public static final int BASE_VERSION_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object baseVersion_ = "";
/**
*
*
* <pre>
* Required. Name of the base flow version to compare with the target version.
* Use version ID `0` to indicate the draft version of the specified flow.
*
* Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string base_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The baseVersion.
*/
@java.lang.Override
public java.lang.String getBaseVersion() {
java.lang.Object ref = baseVersion_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
baseVersion_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Name of the base flow version to compare with the target version.
* Use version ID `0` to indicate the draft version of the specified flow.
*
* Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string base_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for baseVersion.
*/
@java.lang.Override
public com.google.protobuf.ByteString getBaseVersionBytes() {
java.lang.Object ref = baseVersion_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
baseVersion_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TARGET_VERSION_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object targetVersion_ = "";
/**
*
*
* <pre>
* Required. Name of the target flow version to compare with the
* base version. Use version ID `0` to indicate the draft version of the
* specified flow. Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string target_version = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The targetVersion.
*/
@java.lang.Override
public java.lang.String getTargetVersion() {
java.lang.Object ref = targetVersion_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
targetVersion_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Name of the target flow version to compare with the
* base version. Use version ID `0` to indicate the draft version of the
* specified flow. Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string target_version = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for targetVersion.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTargetVersionBytes() {
java.lang.Object ref = targetVersion_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
targetVersion_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LANGUAGE_CODE_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object languageCode_ = "";
/**
*
*
* <pre>
* The language to compare the flow versions for.
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/docs/reference/language) are
* supported. Note: languages must be enabled in the agent before they can be
* used.
* </pre>
*
* <code>string language_code = 3;</code>
*
* @return The languageCode.
*/
@java.lang.Override
public java.lang.String getLanguageCode() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
languageCode_ = s;
return s;
}
}
/**
*
*
* <pre>
* The language to compare the flow versions for.
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/docs/reference/language) are
* supported. Note: languages must be enabled in the agent before they can be
* used.
* </pre>
*
* <code>string language_code = 3;</code>
*
* @return The bytes for languageCode.
*/
@java.lang.Override
public com.google.protobuf.ByteString getLanguageCodeBytes() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
languageCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(baseVersion_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, baseVersion_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetVersion_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, targetVersion_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, languageCode_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(baseVersion_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, baseVersion_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetVersion_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, targetVersion_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, languageCode_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest other =
(com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest) obj;
if (!getBaseVersion().equals(other.getBaseVersion())) return false;
if (!getTargetVersion().equals(other.getTargetVersion())) return false;
if (!getLanguageCode().equals(other.getLanguageCode())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + BASE_VERSION_FIELD_NUMBER;
hash = (53 * hash) + getBaseVersion().hashCode();
hash = (37 * hash) + TARGET_VERSION_FIELD_NUMBER;
hash = (53 * hash) + getTargetVersion().hashCode();
hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
hash = (53 * hash) + getLanguageCode().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [Versions.CompareVersions][google.cloud.dialogflow.cx.v3.Versions.CompareVersions].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.CompareVersionsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.CompareVersionsRequest)
com.google.cloud.dialogflow.cx.v3.CompareVersionsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3.VersionProto
.internal_static_google_cloud_dialogflow_cx_v3_CompareVersionsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3.VersionProto
.internal_static_google_cloud_dialogflow_cx_v3_CompareVersionsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest.class,
com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
baseVersion_ = "";
targetVersion_ = "";
languageCode_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.cx.v3.VersionProto
.internal_static_google_cloud_dialogflow_cx_v3_CompareVersionsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest getDefaultInstanceForType() {
return com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest build() {
com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest buildPartial() {
com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest result =
new com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.baseVersion_ = baseVersion_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.targetVersion_ = targetVersion_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.languageCode_ = languageCode_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest) {
return mergeFrom((com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest other) {
if (other == com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest.getDefaultInstance())
return this;
if (!other.getBaseVersion().isEmpty()) {
baseVersion_ = other.baseVersion_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getTargetVersion().isEmpty()) {
targetVersion_ = other.targetVersion_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getLanguageCode().isEmpty()) {
languageCode_ = other.languageCode_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
baseVersion_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
targetVersion_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
languageCode_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object baseVersion_ = "";
/**
*
*
* <pre>
* Required. Name of the base flow version to compare with the target version.
* Use version ID `0` to indicate the draft version of the specified flow.
*
* Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string base_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The baseVersion.
*/
public java.lang.String getBaseVersion() {
java.lang.Object ref = baseVersion_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
baseVersion_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the base flow version to compare with the target version.
* Use version ID `0` to indicate the draft version of the specified flow.
*
* Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string base_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for baseVersion.
*/
public com.google.protobuf.ByteString getBaseVersionBytes() {
java.lang.Object ref = baseVersion_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
baseVersion_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the base flow version to compare with the target version.
* Use version ID `0` to indicate the draft version of the specified flow.
*
* Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string base_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The baseVersion to set.
* @return This builder for chaining.
*/
public Builder setBaseVersion(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
baseVersion_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the base flow version to compare with the target version.
* Use version ID `0` to indicate the draft version of the specified flow.
*
* Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string base_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearBaseVersion() {
baseVersion_ = getDefaultInstance().getBaseVersion();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the base flow version to compare with the target version.
* Use version ID `0` to indicate the draft version of the specified flow.
*
* Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string base_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for baseVersion to set.
* @return This builder for chaining.
*/
public Builder setBaseVersionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
baseVersion_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object targetVersion_ = "";
/**
*
*
* <pre>
* Required. Name of the target flow version to compare with the
* base version. Use version ID `0` to indicate the draft version of the
* specified flow. Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string target_version = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The targetVersion.
*/
public java.lang.String getTargetVersion() {
java.lang.Object ref = targetVersion_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
targetVersion_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the target flow version to compare with the
* base version. Use version ID `0` to indicate the draft version of the
* specified flow. Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string target_version = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for targetVersion.
*/
public com.google.protobuf.ByteString getTargetVersionBytes() {
java.lang.Object ref = targetVersion_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
targetVersion_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the target flow version to compare with the
* base version. Use version ID `0` to indicate the draft version of the
* specified flow. Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string target_version = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The targetVersion to set.
* @return This builder for chaining.
*/
public Builder setTargetVersion(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
targetVersion_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the target flow version to compare with the
* base version. Use version ID `0` to indicate the draft version of the
* specified flow. Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string target_version = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearTargetVersion() {
targetVersion_ = getDefaultInstance().getTargetVersion();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the target flow version to compare with the
* base version. Use version ID `0` to indicate the draft version of the
* specified flow. Format:
* `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>/flows/<FlowID>/versions/<VersionID>`.
* </pre>
*
* <code>
* string target_version = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for targetVersion to set.
* @return This builder for chaining.
*/
public Builder setTargetVersionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
targetVersion_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object languageCode_ = "";
/**
*
*
* <pre>
* The language to compare the flow versions for.
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/docs/reference/language) are
* supported. Note: languages must be enabled in the agent before they can be
* used.
* </pre>
*
* <code>string language_code = 3;</code>
*
* @return The languageCode.
*/
public java.lang.String getLanguageCode() {
java.lang.Object ref = languageCode_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
languageCode_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The language to compare the flow versions for.
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/docs/reference/language) are
* supported. Note: languages must be enabled in the agent before they can be
* used.
* </pre>
*
* <code>string language_code = 3;</code>
*
* @return The bytes for languageCode.
*/
public com.google.protobuf.ByteString getLanguageCodeBytes() {
java.lang.Object ref = languageCode_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
languageCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The language to compare the flow versions for.
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/docs/reference/language) are
* supported. Note: languages must be enabled in the agent before they can be
* used.
* </pre>
*
* <code>string language_code = 3;</code>
*
* @param value The languageCode to set.
* @return This builder for chaining.
*/
public Builder setLanguageCode(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
languageCode_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The language to compare the flow versions for.
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/docs/reference/language) are
* supported. Note: languages must be enabled in the agent before they can be
* used.
* </pre>
*
* <code>string language_code = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearLanguageCode() {
languageCode_ = getDefaultInstance().getLanguageCode();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* The language to compare the flow versions for.
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/docs/reference/language) are
* supported. Note: languages must be enabled in the agent before they can be
* used.
* </pre>
*
* <code>string language_code = 3;</code>
*
* @param value The bytes for languageCode to set.
* @return This builder for chaining.
*/
public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
languageCode_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
    // Delegates unknown-field merging (fields added after this stub was
    // generated are preserved, not dropped) to the base class.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.CompareVersionsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.CompareVersionsRequest)
  // Shared immutable singleton; returned for default/empty messages and used
  // as the identity value in merge operations.
  private static final com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest();
  }
  /** Returns the immutable all-fields-unset default instance. */
  public static com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser shared by all parseFrom() overloads. On any failure the
  // partially-read message is attached to the thrown exception so callers can
  // inspect what was decoded before the error.
  private static final com.google.protobuf.Parser<CompareVersionsRequest> PARSER =
      new com.google.protobuf.AbstractParser<CompareVersionsRequest>() {
        @java.lang.Override
        public CompareVersionsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared, stateless parser for this message type. */
  public static com.google.protobuf.Parser<CompareVersionsRequest> parser() {
    return PARSER;
  }
  // Instance-level parser accessor required by the MessageLite contract.
  @java.lang.Override
  public com.google.protobuf.Parser<CompareVersionsRequest> getParserForType() {
    return PARSER;
  }
  // Instance-level default accessor required by the MessageLite contract.
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3.CompareVersionsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 38,106 | java-gkehub/proto-google-cloud-gkehub-v1beta/src/main/java/com/google/cloud/gkehub/v1beta/FeatureState.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gkehub/v1beta/feature.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gkehub.v1beta;
/**
*
*
* <pre>
* FeatureState describes the high-level state of a Feature. It may be used to
* describe a Feature's state at the environ-level, or per-membershop, depending
* on the context.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.v1beta.FeatureState}
*/
public final class FeatureState extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.gkehub.v1beta.FeatureState)
FeatureStateOrBuilder {
private static final long serialVersionUID = 0L;
// Use FeatureState.newBuilder() to construct.
private FeatureState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private FeatureState() {
code_ = 0;
description_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new FeatureState();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.v1beta.FeatureProto
.internal_static_google_cloud_gkehub_v1beta_FeatureState_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.v1beta.FeatureProto
.internal_static_google_cloud_gkehub_v1beta_FeatureState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.v1beta.FeatureState.class,
com.google.cloud.gkehub.v1beta.FeatureState.Builder.class);
}
/**
*
*
* <pre>
* Code represents a machine-readable, high-level status of the Feature.
* </pre>
*
* Protobuf enum {@code google.cloud.gkehub.v1beta.FeatureState.Code}
*/
public enum Code implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* Unknown or not set.
* </pre>
*
* <code>CODE_UNSPECIFIED = 0;</code>
*/
CODE_UNSPECIFIED(0),
/**
*
*
* <pre>
* The Feature is operating normally.
* </pre>
*
* <code>OK = 1;</code>
*/
OK(1),
/**
*
*
* <pre>
* The Feature has encountered an issue, and is operating in a degraded
* state. The Feature may need intervention to return to normal operation.
* See the description and any associated Feature-specific details for more
* information.
* </pre>
*
* <code>WARNING = 2;</code>
*/
WARNING(2),
/**
*
*
* <pre>
* The Feature is not operating or is in a severely degraded state.
* The Feature may need intervention to return to normal operation.
* See the description and any associated Feature-specific details for more
* information.
* </pre>
*
* <code>ERROR = 3;</code>
*/
ERROR(3),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* Unknown or not set.
* </pre>
*
* <code>CODE_UNSPECIFIED = 0;</code>
*/
public static final int CODE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* The Feature is operating normally.
* </pre>
*
* <code>OK = 1;</code>
*/
public static final int OK_VALUE = 1;
/**
*
*
* <pre>
* The Feature has encountered an issue, and is operating in a degraded
* state. The Feature may need intervention to return to normal operation.
* See the description and any associated Feature-specific details for more
* information.
* </pre>
*
* <code>WARNING = 2;</code>
*/
public static final int WARNING_VALUE = 2;
/**
*
*
* <pre>
* The Feature is not operating or is in a severely degraded state.
* The Feature may need intervention to return to normal operation.
* See the description and any associated Feature-specific details for more
* information.
* </pre>
*
* <code>ERROR = 3;</code>
*/
public static final int ERROR_VALUE = 3;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static Code valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static Code forNumber(int value) {
switch (value) {
case 0:
return CODE_UNSPECIFIED;
case 1:
return OK;
case 2:
return WARNING;
case 3:
return ERROR;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<Code> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<Code> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<Code>() {
public Code findValueByNumber(int number) {
return Code.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.gkehub.v1beta.FeatureState.getDescriptor().getEnumTypes().get(0);
}
private static final Code[] VALUES = values();
    public static Code valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      // Index -1 marks a value unknown to this generated runtime version.
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
private final int value;
private Code(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.gkehub.v1beta.FeatureState.Code)
}
private int bitField0_;
public static final int CODE_FIELD_NUMBER = 1;
private int code_ = 0;
/**
*
*
* <pre>
* The high-level, machine-readable status of this Feature.
* </pre>
*
* <code>.google.cloud.gkehub.v1beta.FeatureState.Code code = 1;</code>
*
* @return The enum numeric value on the wire for code.
*/
@java.lang.Override
public int getCodeValue() {
return code_;
}
/**
*
*
* <pre>
* The high-level, machine-readable status of this Feature.
* </pre>
*
* <code>.google.cloud.gkehub.v1beta.FeatureState.Code code = 1;</code>
*
* @return The code.
*/
@java.lang.Override
public com.google.cloud.gkehub.v1beta.FeatureState.Code getCode() {
com.google.cloud.gkehub.v1beta.FeatureState.Code result =
com.google.cloud.gkehub.v1beta.FeatureState.Code.forNumber(code_);
return result == null ? com.google.cloud.gkehub.v1beta.FeatureState.Code.UNRECOGNIZED : result;
}
public static final int DESCRIPTION_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object description_ = "";
/**
*
*
* <pre>
* A human-readable description of the current status.
* </pre>
*
* <code>string description = 2;</code>
*
* @return The description.
*/
@java.lang.Override
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
description_ = s;
return s;
}
}
/**
*
*
* <pre>
* A human-readable description of the current status.
* </pre>
*
* <code>string description = 2;</code>
*
* @return The bytes for description.
*/
@java.lang.Override
public com.google.protobuf.ByteString getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
description_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int UPDATE_TIME_FIELD_NUMBER = 3;
private com.google.protobuf.Timestamp updateTime_;
/**
*
*
* <pre>
* The time this status and any related Feature-specific details were updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 3;</code>
*
* @return Whether the updateTime field is set.
*/
@java.lang.Override
public boolean hasUpdateTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The time this status and any related Feature-specific details were updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 3;</code>
*
* @return The updateTime.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getUpdateTime() {
return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_;
}
/**
*
*
* <pre>
* The time this status and any related Feature-specific details were updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 3;</code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 scalar fields are only emitted when they differ from the default.
    if (code_ != com.google.cloud.gkehub.v1beta.FeatureState.Code.CODE_UNSPECIFIED.getNumber()) {
      output.writeEnum(1, code_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, description_);
    }
    // Message fields use an explicit has-bit rather than a default-value check.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getUpdateTime());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // cached from a previous call
    size = 0;
    // Must mirror writeTo(): only non-default / explicitly-set fields count.
    if (code_ != com.google.cloud.gkehub.v1beta.FeatureState.Code.CODE_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, code_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, description_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateTime());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.gkehub.v1beta.FeatureState)) {
      return super.equals(obj);
    }
    com.google.cloud.gkehub.v1beta.FeatureState other =
        (com.google.cloud.gkehub.v1beta.FeatureState) obj;
    if (code_ != other.code_) return false;
    if (!getDescription().equals(other.getDescription())) return false;
    // update_time is optional: presence must match before values are compared.
    if (hasUpdateTime() != other.hasUpdateTime()) return false;
    if (hasUpdateTime()) {
      if (!getUpdateTime().equals(other.getUpdateTime())) return false;
    }
    // Unknown fields participate so round-tripped messages compare equal.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 is reserved to mean "not yet computed"; a rare collision with 0 just
    // means the hash is recomputed on the next call.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + CODE_FIELD_NUMBER;
    hash = (53 * hash) + code_;
    hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
    hash = (53 * hash) + getDescription().hashCode();
    // Only mix in update_time when set, mirroring equals().
    if (hasUpdateTime()) {
      hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateTime().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.gkehub.v1beta.FeatureState parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1beta.FeatureState parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta.FeatureState parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1beta.FeatureState parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta.FeatureState parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1beta.FeatureState parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta.FeatureState parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1beta.FeatureState parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta.FeatureState parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1beta.FeatureState parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.v1beta.FeatureState parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1beta.FeatureState parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.gkehub.v1beta.FeatureState prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* FeatureState describes the high-level state of a Feature. It may be used to
* describe a Feature's state at the environ-level, or per-membershop, depending
* on the context.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.v1beta.FeatureState}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.gkehub.v1beta.FeatureState)
com.google.cloud.gkehub.v1beta.FeatureStateOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.v1beta.FeatureProto
.internal_static_google_cloud_gkehub_v1beta_FeatureState_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.v1beta.FeatureProto
.internal_static_google_cloud_gkehub_v1beta_FeatureState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.v1beta.FeatureState.class,
com.google.cloud.gkehub.v1beta.FeatureState.Builder.class);
}
// Construct using com.google.cloud.gkehub.v1beta.FeatureState.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateTimeFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
code_ = 0;
description_ = "";
updateTime_ = null;
if (updateTimeBuilder_ != null) {
updateTimeBuilder_.dispose();
updateTimeBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.gkehub.v1beta.FeatureProto
.internal_static_google_cloud_gkehub_v1beta_FeatureState_descriptor;
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta.FeatureState getDefaultInstanceForType() {
return com.google.cloud.gkehub.v1beta.FeatureState.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta.FeatureState build() {
com.google.cloud.gkehub.v1beta.FeatureState result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta.FeatureState buildPartial() {
com.google.cloud.gkehub.v1beta.FeatureState result =
new com.google.cloud.gkehub.v1beta.FeatureState(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies explicitly-set builder fields into the message under construction.
    private void buildPartial0(com.google.cloud.gkehub.v1beta.FeatureState result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.code_ = code_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.description_ = description_;
      }
      // The message's bitField0_ tracks only update_time, so builder bit
      // 0x00000004 is remapped to message bit 0x00000001.
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.updateTime_ = updateTimeBuilder_ == null ? updateTime_ : updateTimeBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.gkehub.v1beta.FeatureState) {
return mergeFrom((com.google.cloud.gkehub.v1beta.FeatureState) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Standard protobuf merge: fields set in `other` overwrite or (for message
    // fields) merge into this builder; unset fields are left untouched.
    public Builder mergeFrom(com.google.cloud.gkehub.v1beta.FeatureState other) {
      if (other == com.google.cloud.gkehub.v1beta.FeatureState.getDefaultInstance()) return this;
      // Scalars: only copied when the source holds a non-default value.
      if (other.code_ != 0) {
        setCodeValue(other.getCodeValue());
      }
      if (!other.getDescription().isEmpty()) {
        description_ = other.description_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      // Message field: recursively merged rather than replaced wholesale.
      if (other.hasUpdateTime()) {
        mergeUpdateTime(other.getUpdateTime());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // A tag encodes (field_number << 3) | wire_type; 0 means end of input.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: // code = 1, varint
              {
                code_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 18: // description = 2, length-delimited
              {
                description_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26: // update_time = 3, length-delimited message
              {
                input.readMessage(getUpdateTimeFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                // Preserve unrecognized fields; false means an end-group tag.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure so partially-merged state is visible.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private int code_ = 0;
/**
*
*
* <pre>
* The high-level, machine-readable status of this Feature.
* </pre>
*
* <code>.google.cloud.gkehub.v1beta.FeatureState.Code code = 1;</code>
*
* @return The enum numeric value on the wire for code.
*/
@java.lang.Override
public int getCodeValue() {
return code_;
}
/**
*
*
* <pre>
* The high-level, machine-readable status of this Feature.
* </pre>
*
* <code>.google.cloud.gkehub.v1beta.FeatureState.Code code = 1;</code>
*
* @param value The enum numeric value on the wire for code to set.
* @return This builder for chaining.
*/
public Builder setCodeValue(int value) {
code_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The high-level, machine-readable status of this Feature.
* </pre>
*
* <code>.google.cloud.gkehub.v1beta.FeatureState.Code code = 1;</code>
*
* @return The code.
*/
@java.lang.Override
public com.google.cloud.gkehub.v1beta.FeatureState.Code getCode() {
com.google.cloud.gkehub.v1beta.FeatureState.Code result =
com.google.cloud.gkehub.v1beta.FeatureState.Code.forNumber(code_);
return result == null
? com.google.cloud.gkehub.v1beta.FeatureState.Code.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* The high-level, machine-readable status of this Feature.
* </pre>
*
* <code>.google.cloud.gkehub.v1beta.FeatureState.Code code = 1;</code>
*
* @param value The code to set.
* @return This builder for chaining.
*/
public Builder setCode(com.google.cloud.gkehub.v1beta.FeatureState.Code value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
code_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* The high-level, machine-readable status of this Feature.
* </pre>
*
* <code>.google.cloud.gkehub.v1beta.FeatureState.Code code = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearCode() {
bitField0_ = (bitField0_ & ~0x00000001);
code_ = 0;
onChanged();
return this;
}
private java.lang.Object description_ = "";
/**
*
*
* <pre>
* A human-readable description of the current status.
* </pre>
*
* <code>string description = 2;</code>
*
* @return The description.
*/
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
description_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A human-readable description of the current status.
* </pre>
*
* <code>string description = 2;</code>
*
* @return The bytes for description.
*/
public com.google.protobuf.ByteString getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
description_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A human-readable description of the current status.
* </pre>
*
* <code>string description = 2;</code>
*
* @param value The description to set.
* @return This builder for chaining.
*/
public Builder setDescription(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
description_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A human-readable description of the current status.
* </pre>
*
* <code>string description = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearDescription() {
description_ = getDefaultInstance().getDescription();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A human-readable description of the current status.
* </pre>
*
* <code>string description = 2;</code>
*
* @param value The bytes for description to set.
* @return This builder for chaining.
*/
public Builder setDescriptionBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
description_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.protobuf.Timestamp updateTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
updateTimeBuilder_;
/**
*
*
* <pre>
* The time this status and any related Feature-specific details were updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 3;</code>
*
* @return Whether the updateTime field is set.
*/
public boolean hasUpdateTime() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* The time this status and any related Feature-specific details were updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 3;</code>
*
* @return The updateTime.
*/
public com.google.protobuf.Timestamp getUpdateTime() {
if (updateTimeBuilder_ == null) {
return updateTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: updateTime_;
} else {
return updateTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The time this status and any related Feature-specific details were updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 3;</code>
*/
public Builder setUpdateTime(com.google.protobuf.Timestamp value) {
if (updateTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateTime_ = value;
} else {
updateTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The time this status and any related Feature-specific details were updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 3;</code>
*/
public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (updateTimeBuilder_ == null) {
updateTime_ = builderForValue.build();
} else {
updateTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The time this status and any related Feature-specific details were updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 3;</code>
*/
    public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) {
      if (updateTimeBuilder_ == null) {
        // Merge into an existing non-default value; otherwise adopt the new
        // message outright (merging into the default would be a plain copy).
        if (((bitField0_ & 0x00000004) != 0)
            && updateTime_ != null
            && updateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
          getUpdateTimeBuilder().mergeFrom(value);
        } else {
          updateTime_ = value;
        }
      } else {
        updateTimeBuilder_.mergeFrom(value);
      }
      // Only mark the field set (and fire change events) when a value landed
      // in the plain field; the builder path manages its own state.
      if (updateTime_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
/**
 * Clears the {@code update_time} field (field 3): drops the has-bit (0x00000004),
 * nulls the cached message, and releases any nested Timestamp builder.
 *
 * <pre>
 * The time this status and any related Feature-specific details were updated.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 3;</code>
 */
public Builder clearUpdateTime() {
  bitField0_ = (bitField0_ & ~0x00000004);
  updateTime_ = null;
  if (updateTimeBuilder_ != null) {
    // Detach the child builder so it stops reporting changes to this parent.
    updateTimeBuilder_.dispose();
    updateTimeBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * Returns a mutable builder for {@code update_time}; marks the field as set so
 * the pending value is emitted by buildPartial().
 *
 * <pre>
 * The time this status and any related Feature-specific details were updated.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 3;</code>
 */
public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() {
  bitField0_ |= 0x00000004;
  onChanged();
  return getUpdateTimeFieldBuilder().getBuilder();
}
/**
 * Read-only view of {@code update_time}; never returns null — falls back to the
 * Timestamp default instance when the field is unset.
 *
 * <pre>
 * The time this status and any related Feature-specific details were updated.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 3;</code>
 */
public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
  if (updateTimeBuilder_ != null) {
    return updateTimeBuilder_.getMessageOrBuilder();
  } else {
    return updateTime_ == null
        ? com.google.protobuf.Timestamp.getDefaultInstance()
        : updateTime_;
  }
}
/**
 * Lazily creates the SingleFieldBuilderV3 for {@code update_time}. Once the
 * field builder exists it owns the value, so the plain field is nulled out.
 *
 * <pre>
 * The time this status and any related Feature-specific details were updated.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 3;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Timestamp,
        com.google.protobuf.Timestamp.Builder,
        com.google.protobuf.TimestampOrBuilder>
    getUpdateTimeFieldBuilder() {
  if (updateTimeBuilder_ == null) {
    updateTimeBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>(
            getUpdateTime(), getParentForChildren(), isClean());
    updateTime_ = null;
  }
  return updateTimeBuilder_;
}
// Unknown-field handling is delegated unchanged to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.gkehub.v1beta.FeatureState)
}
// @@protoc_insertion_point(class_scope:google.cloud.gkehub.v1beta.FeatureState)
// Singleton default instance; also serves as the identity value for merges.
private static final com.google.cloud.gkehub.v1beta.FeatureState DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.gkehub.v1beta.FeatureState();
}
public static com.google.cloud.gkehub.v1beta.FeatureState getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
/**
 * Wire-format parser: delegates to Builder.mergeFrom and rewraps every failure
 * as an InvalidProtocolBufferException carrying the partially parsed message.
 */
private static final com.google.protobuf.Parser<FeatureState> PARSER =
    new com.google.protobuf.AbstractParser<FeatureState>() {
      @java.lang.Override
      public FeatureState parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Any other I/O problem is surfaced as a protobuf parse failure.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<FeatureState> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<FeatureState> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.gkehub.v1beta.FeatureState getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ----- NOTE(review): a dataset-concatenation artifact (repo_id | size | file_path row)
// ----- was removed here. The content below belongs to a separate generated file:
// ----- java-secretmanager/.../v1beta2/ListSecretsRequest.java — it should live in its own file.
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/secretmanager/v1beta2/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.secretmanager.v1beta2;
/**
*
*
* <pre>
* Request message for
* [SecretManagerService.ListSecrets][google.cloud.secretmanager.v1beta2.SecretManagerService.ListSecrets].
* </pre>
*
* Protobuf type {@code google.cloud.secretmanager.v1beta2.ListSecretsRequest}
*/
public final class ListSecretsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.secretmanager.v1beta2.ListSecretsRequest)
ListSecretsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListSecretsRequest.newBuilder() to construct.
private ListSecretsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default constructor: all string fields start as the empty string (proto3 default).
private ListSecretsRequest() {
  parent_ = "";
  pageToken_ = "";
  filter_ = "";
}
// Reflection hook used by the protobuf runtime to allocate fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListSecretsRequest();
}
// Descriptor / accessor-table plumbing generated from
// google/cloud/secretmanager/v1beta2/service.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.secretmanager.v1beta2.ServiceProto
      .internal_static_google_cloud_secretmanager_v1beta2_ListSecretsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.secretmanager.v1beta2.ServiceProto
      .internal_static_google_cloud_secretmanager_v1beta2_ListSecretsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.secretmanager.v1beta2.ListSecretsRequest.class,
          com.google.cloud.secretmanager.v1beta2.ListSecretsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; decoded lazily and cached on first
// access (standard generated-code idiom for string fields).
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
 * Required. The resource name of the project associated with the Secrets, in
 * the format {@code projects/&#42;} or {@code projects/&#42;/locations/&#42;}.
 *
 * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED, ...];</code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the UTF-8 bytes once and cache.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}
/**
 * Required. See {@link #getParent()}.
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
 * Optional. The maximum number of results to be returned in a single page. If
 * set to 0, the server decides the number of results to return. If the
 * number is greater than 25000, it is capped at 25000.
 *
 * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
// Lazily decoded String/ByteString holder, same idiom as parent_.
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
 * Optional. Pagination token, returned earlier via
 * ListSecretsResponse.next_page_token.
 *
 * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The pageToken.
 */
@java.lang.Override
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    pageToken_ = s;
    return s;
  }
}
/**
 * Optional. See {@link #getPageToken()}.
 *
 * @return The bytes for pageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int FILTER_FIELD_NUMBER = 4;
// Lazily decoded String/ByteString holder, same idiom as parent_.
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
 * Optional. Filter string, adhering to the rules in
 * [List-operation
 * filtering](https://cloud.google.com/secret-manager/docs/filtering). List
 * only secrets matching the filter. If filter is empty, all secrets are
 * listed.
 *
 * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The filter.
 */
@java.lang.Override
public java.lang.String getFilter() {
  java.lang.Object ref = filter_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    filter_ = s;
    return s;
  }
}
/**
 * Optional. See {@link #getFilter()}.
 *
 * @return The bytes for filter.
 */
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
  java.lang.Object ref = filter_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    filter_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized isInitialized result: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // Proto3 message with no required fields: always initialized.
  memoizedIsInitialized = 1;
  return true;
}
/** Serializes only non-default fields, in ascending field-number order. */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (pageSize_ != 0) {
    output.writeInt32(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
  }
  getUnknownFields().writeTo(output);
}
/** Computes (and memoizes) the serialized byte size; must mirror writeTo. */
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (pageSize_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
/** Field-wise equality, including unknown fields. */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.secretmanager.v1beta2.ListSecretsRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.secretmanager.v1beta2.ListSecretsRequest other =
      (com.google.cloud.secretmanager.v1beta2.ListSecretsRequest) obj;
  if (!getParent().equals(other.getParent())) return false;
  if (getPageSize() != other.getPageSize()) return false;
  if (!getPageToken().equals(other.getPageToken())) return false;
  if (!getFilter().equals(other.getFilter())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
/**
 * Memoized hash consistent with equals(); mixes the descriptor, each field
 * (tagged by its field number), and the unknown fields.
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
  hash = (53 * hash) + getPageSize();
  hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getPageToken().hashCode();
  hash = (37 * hash) + FILTER_FIELD_NUMBER;
  hash = (53 * hash) + getFilter().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard parseFrom overload family: every input form (ByteBuffer, ByteString,
// byte[], InputStream, CodedInputStream, delimited stream) funnels into PARSER.
public static com.google.cloud.secretmanager.v1beta2.ListSecretsRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.secretmanager.v1beta2.ListSecretsRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.secretmanager.v1beta2.ListSecretsRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.secretmanager.v1beta2.ListSecretsRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.secretmanager.v1beta2.ListSecretsRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.secretmanager.v1beta2.ListSecretsRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.secretmanager.v1beta2.ListSecretsRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.secretmanager.v1beta2.ListSecretsRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload.
public static com.google.cloud.secretmanager.v1beta2.ListSecretsRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.secretmanager.v1beta2.ListSecretsRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.secretmanager.v1beta2.ListSecretsRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.secretmanager.v1beta2.ListSecretsRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factories.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
    com.google.cloud.secretmanager.v1beta2.ListSecretsRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Avoid a needless merge when converting the default instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Request message for
* [SecretManagerService.ListSecrets][google.cloud.secretmanager.v1beta2.SecretManagerService.ListSecrets].
* </pre>
*
* Protobuf type {@code google.cloud.secretmanager.v1beta2.ListSecretsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.secretmanager.v1beta2.ListSecretsRequest)
com.google.cloud.secretmanager.v1beta2.ListSecretsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.secretmanager.v1beta2.ServiceProto
.internal_static_google_cloud_secretmanager_v1beta2_ListSecretsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.secretmanager.v1beta2.ServiceProto
.internal_static_google_cloud_secretmanager_v1beta2_ListSecretsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.secretmanager.v1beta2.ListSecretsRequest.class,
com.google.cloud.secretmanager.v1beta2.ListSecretsRequest.Builder.class);
}
// Construct using com.google.cloud.secretmanager.v1beta2.ListSecretsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.secretmanager.v1beta2.ServiceProto
.internal_static_google_cloud_secretmanager_v1beta2_ListSecretsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.secretmanager.v1beta2.ListSecretsRequest getDefaultInstanceForType() {
return com.google.cloud.secretmanager.v1beta2.ListSecretsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.secretmanager.v1beta2.ListSecretsRequest build() {
com.google.cloud.secretmanager.v1beta2.ListSecretsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.secretmanager.v1beta2.ListSecretsRequest buildPartial() {
com.google.cloud.secretmanager.v1beta2.ListSecretsRequest result =
new com.google.cloud.secretmanager.v1beta2.ListSecretsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.secretmanager.v1beta2.ListSecretsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.secretmanager.v1beta2.ListSecretsRequest) {
return mergeFrom((com.google.cloud.secretmanager.v1beta2.ListSecretsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.secretmanager.v1beta2.ListSecretsRequest other) {
if (other == com.google.cloud.secretmanager.v1beta2.ListSecretsRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the project associated with the
* [Secrets][google.cloud.secretmanager.v1beta2.Secret], in the format
* `projects/*` or `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the project associated with the
* [Secrets][google.cloud.secretmanager.v1beta2.Secret], in the format
* `projects/*` or `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the project associated with the
* [Secrets][google.cloud.secretmanager.v1beta2.Secret], in the format
* `projects/*` or `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the project associated with the
* [Secrets][google.cloud.secretmanager.v1beta2.Secret], in the format
* `projects/*` or `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the project associated with the
* [Secrets][google.cloud.secretmanager.v1beta2.Secret], in the format
* `projects/*` or `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. The maximum number of results to be returned in a single page. If
* set to 0, the server decides the number of results to return. If the
* number is greater than 25000, it is capped at 25000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. The maximum number of results to be returned in a single page. If
* set to 0, the server decides the number of results to return. If the
* number is greater than 25000, it is capped at 25000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The maximum number of results to be returned in a single page. If
* set to 0, the server decides the number of results to return. If the
* number is greater than 25000, it is capped at 25000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. Pagination token, returned earlier via
* [ListSecretsResponse.next_page_token][google.cloud.secretmanager.v1beta2.ListSecretsResponse.next_page_token].
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Pagination token, returned earlier via
* [ListSecretsResponse.next_page_token][google.cloud.secretmanager.v1beta2.ListSecretsResponse.next_page_token].
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. Pagination token, returned earlier via
* [ListSecretsResponse.next_page_token][google.cloud.secretmanager.v1beta2.ListSecretsResponse.next_page_token].
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Pagination token, returned earlier via
* [ListSecretsResponse.next_page_token][google.cloud.secretmanager.v1beta2.ListSecretsResponse.next_page_token].
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Pagination token, returned earlier via
* [ListSecretsResponse.next_page_token][google.cloud.secretmanager.v1beta2.ListSecretsResponse.next_page_token].
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. Filter string, adhering to the rules in
* [List-operation
* filtering](https://cloud.google.com/secret-manager/docs/filtering). List
* only secrets matching the filter. If filter is empty, all secrets are
* listed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Filter string, adhering to the rules in
* [List-operation
* filtering](https://cloud.google.com/secret-manager/docs/filtering). List
* only secrets matching the filter. If filter is empty, all secrets are
* listed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. Filter string, adhering to the rules in
* [List-operation
* filtering](https://cloud.google.com/secret-manager/docs/filtering). List
* only secrets matching the filter. If filter is empty, all secrets are
* listed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Filter string, adhering to the rules in
* [List-operation
* filtering](https://cloud.google.com/secret-manager/docs/filtering). List
* only secrets matching the filter. If filter is empty, all secrets are
* listed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Filter string, adhering to the rules in
* [List-operation
* filtering](https://cloud.google.com/secret-manager/docs/filtering). List
* only secrets matching the filter. If filter is empty, all secrets are
* listed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        // Byte values for string fields must be non-null; use clearFilter() instead.
        throw new NullPointerException();
      }
      // Proto3 string fields must carry valid UTF-8; reject anything else up front.
      checkByteStringIsUtf8(value);
      filter_ = value;
      // Mark the filter field (bit 3) as explicitly set on this builder.
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    // Replace this builder's unknown-field set wholesale; delegates to the base builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // Merge additional unknown fields into this builder; delegates to the base builder.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.secretmanager.v1beta2.ListSecretsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.secretmanager.v1beta2.ListSecretsRequest)
  // Singleton all-defaults instance, created eagerly at class-load time.
  private static final com.google.cloud.secretmanager.v1beta2.ListSecretsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.secretmanager.v1beta2.ListSecretsRequest();
  }
  public static com.google.cloud.secretmanager.v1beta2.ListSecretsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by the protobuf runtime to decode ListSecretsRequest from wire bytes.
  private static final com.google.protobuf.Parser<ListSecretsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListSecretsRequest>() {
        @java.lang.Override
        public ListSecretsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect the partial message.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Convert the missing-required-fields error to the standard parse exception.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListSecretsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListSecretsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.secretmanager.v1beta2.ListSecretsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/giraph | 38,114 | giraph-core/src/main/java/org/apache/giraph/conf/GiraphConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.conf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.buffer.UnpooledByteBufAllocator;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Locale;
import org.apache.giraph.aggregators.AggregatorWriter;
import org.apache.giraph.bsp.checkpoints.CheckpointSupportedChecker;
import org.apache.giraph.combiner.MessageCombiner;
import org.apache.giraph.edge.OutEdges;
import org.apache.giraph.edge.ReuseObjectsOutEdges;
import org.apache.giraph.factories.ComputationFactory;
import org.apache.giraph.factories.VertexValueFactory;
import org.apache.giraph.graph.Computation;
import org.apache.giraph.graph.MapperObserver;
import org.apache.giraph.graph.Vertex;
import org.apache.giraph.graph.VertexResolver;
import org.apache.giraph.graph.VertexValueCombiner;
import org.apache.giraph.io.EdgeInputFormat;
import org.apache.giraph.io.EdgeOutputFormat;
import org.apache.giraph.io.MappingInputFormat;
import org.apache.giraph.io.VertexInputFormat;
import org.apache.giraph.io.VertexOutputFormat;
import org.apache.giraph.io.filters.EdgeInputFilter;
import org.apache.giraph.io.filters.VertexInputFilter;
import org.apache.giraph.job.GiraphJobObserver;
import org.apache.giraph.job.GiraphJobRetryChecker;
import org.apache.giraph.master.MasterCompute;
import org.apache.giraph.master.MasterObserver;
import org.apache.giraph.partition.GraphPartitionerFactory;
import org.apache.giraph.partition.Partition;
import org.apache.giraph.partition.ReusesObjectsPartition;
import org.apache.giraph.utils.GcObserver;
import org.apache.giraph.utils.ReflectionUtils;
import org.apache.giraph.worker.WorkerContext;
import org.apache.giraph.worker.WorkerObserver;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.net.DNS;
/**
* Adds user methods specific to Giraph. This will be put into an
* ImmutableClassesGiraphConfiguration that provides the configuration plus
* the immutable classes.
*
* Keeps track of parameters which were set so it easily set them in another
* copy of configuration.
*/
public class GiraphConfiguration extends Configuration
implements GiraphConstants {
/** ByteBufAllocator to be used by netty */
private ByteBufAllocator nettyBufferAllocator = null;
  /**
   * Constructor that creates an empty configuration.
   */
  public GiraphConfiguration() {
    // Pick up MRv1 security credentials from the environment, if present.
    configureHadoopSecurity();
  }
  /**
   * Constructor that copies settings from an existing configuration.
   *
   * @param conf Configuration to copy settings from
   */
  public GiraphConfiguration(Configuration conf) {
    super(conf);
    // Pick up MRv1 security credentials from the environment, if present.
    configureHadoopSecurity();
  }
/**
* Get name of computation being run. We leave this up to the
* {@link ComputationFactory} to decide what to return.
*
* @return Name of computation being run
*/
public String getComputationName() {
ComputationFactory compFactory = ReflectionUtils.newInstance(
getComputationFactoryClass());
return compFactory.computationName(this);
}
/**
* Get the user's subclassed {@link ComputationFactory}
*
* @return User's computation factory class
*/
public Class<? extends ComputationFactory> getComputationFactoryClass() {
return COMPUTATION_FACTORY_CLASS.get(this);
}
/**
* Get the user's subclassed {@link Computation}
*
* @return User's computation class
*/
public Class<? extends Computation> getComputationClass() {
return COMPUTATION_CLASS.get(this);
}
/**
* Set the computation class (required)
*
* @param computationClass Runs vertex computation
*/
public void setComputationClass(
Class<? extends Computation> computationClass) {
COMPUTATION_CLASS.set(this, computationClass);
}
/**
* Set the vertex value factory class
*
* @param vertexValueFactoryClass Creates default vertex values
*/
public final void setVertexValueFactoryClass(
Class<? extends VertexValueFactory> vertexValueFactoryClass) {
VERTEX_VALUE_FACTORY_CLASS.set(this, vertexValueFactoryClass);
}
/**
* Set the edge input filter class
*
* @param edgeFilterClass class to use
*/
public void setEdgeInputFilterClass(
Class<? extends EdgeInputFilter> edgeFilterClass) {
EDGE_INPUT_FILTER_CLASS.set(this, edgeFilterClass);
}
/**
* Set the vertex input filter class
*
* @param vertexFilterClass class to use
*/
public void setVertexInputFilterClass(
Class<? extends VertexInputFilter> vertexFilterClass) {
VERTEX_INPUT_FILTER_CLASS.set(this, vertexFilterClass);
}
/**
* Get the vertex edges class
*
* @return vertex edges class
*/
public Class<? extends OutEdges> getOutEdgesClass() {
return VERTEX_EDGES_CLASS.get(this);
}
/**
* Set the vertex edges class
*
* @param outEdgesClass Determines the way edges are stored
*/
public final void setOutEdgesClass(
Class<? extends OutEdges> outEdgesClass) {
VERTEX_EDGES_CLASS.set(this, outEdgesClass);
}
/**
* Set the vertex implementation class
*
* @param vertexClass class of the vertex implementation
*/
public final void setVertexClass(Class<? extends Vertex> vertexClass) {
VERTEX_CLASS.set(this, vertexClass);
}
/**
* Set the vertex edges class used during edge-based input (if different
* from the one used during computation)
*
* @param inputOutEdgesClass Determines the way edges are stored
*/
public final void setInputOutEdgesClass(
Class<? extends OutEdges> inputOutEdgesClass) {
INPUT_VERTEX_EDGES_CLASS.set(this, inputOutEdgesClass);
}
/**
* True if the {@link org.apache.giraph.edge.OutEdges} implementation
* copies the passed edges to its own data structure,
* i.e. it doesn't keep references to Edge objects, target vertex ids or edge
* values passed to add() or initialize().
* This makes it possible to reuse edge objects passed to the data
* structure, to minimize object instantiation (see for example
* EdgeStore#addPartitionEdges()).
*
* @return True iff we can reuse the edge objects
*/
public boolean reuseEdgeObjects() {
return ReuseObjectsOutEdges.class.isAssignableFrom(
getOutEdgesClass());
}
/**
* True if the {@link Partition} implementation copies the passed vertices
* to its own data structure, i.e. it doesn't keep references to Vertex
* objects passed to it.
* This makes it possible to reuse vertex objects passed to the data
* structure, to minimize object instantiation.
*
* @return True iff we can reuse the vertex objects
*/
public boolean reuseVertexObjects() {
return ReusesObjectsPartition.class.isAssignableFrom(getPartitionClass());
}
/**
* Get Partition class used
* @return Partition class
*/
public Class<? extends Partition> getPartitionClass() {
return PARTITION_CLASS.get(this);
}
/**
* Does the job have a {@link VertexInputFormat}?
*
* @return True iff a {@link VertexInputFormat} has been specified.
*/
public boolean hasVertexInputFormat() {
return VERTEX_INPUT_FORMAT_CLASS.get(this) != null;
}
/**
* Set the vertex input format class (required)
*
* @param vertexInputFormatClass Determines how graph is input
*/
public void setVertexInputFormatClass(
Class<? extends VertexInputFormat> vertexInputFormatClass) {
VERTEX_INPUT_FORMAT_CLASS.set(this, vertexInputFormatClass);
}
/**
* Does the job have a {@link EdgeInputFormat}?
*
* @return True iff a {@link EdgeInputFormat} has been specified.
*/
public boolean hasEdgeInputFormat() {
return EDGE_INPUT_FORMAT_CLASS.get(this) != null;
}
/**
* Set the edge input format class (required)
*
* @param edgeInputFormatClass Determines how graph is input
*/
public void setEdgeInputFormatClass(
Class<? extends EdgeInputFormat> edgeInputFormatClass) {
EDGE_INPUT_FORMAT_CLASS.set(this, edgeInputFormatClass);
}
/**
* Set the mapping input format class (optional)
*
* @param mappingInputFormatClass Determines how mappings are input
*/
public void setMappingInputFormatClass(
Class<? extends MappingInputFormat> mappingInputFormatClass) {
MAPPING_INPUT_FORMAT_CLASS.set(this, mappingInputFormatClass);
}
/**
* Set the master class (optional)
*
* @param masterComputeClass Runs master computation
*/
public final void setMasterComputeClass(
Class<? extends MasterCompute> masterComputeClass) {
MASTER_COMPUTE_CLASS.set(this, masterComputeClass);
}
/**
* Add a MasterObserver class (optional)
*
* @param masterObserverClass MasterObserver class to add.
*/
public final void addMasterObserverClass(
Class<? extends MasterObserver> masterObserverClass) {
MASTER_OBSERVER_CLASSES.add(this, masterObserverClass);
}
/**
* Add a WorkerObserver class (optional)
*
* @param workerObserverClass WorkerObserver class to add.
*/
public final void addWorkerObserverClass(
Class<? extends WorkerObserver> workerObserverClass) {
WORKER_OBSERVER_CLASSES.add(this, workerObserverClass);
}
/**
* Add a MapperObserver class (optional)
*
* @param mapperObserverClass MapperObserver class to add.
*/
public final void addMapperObserverClass(
Class<? extends MapperObserver> mapperObserverClass) {
MAPPER_OBSERVER_CLASSES.add(this, mapperObserverClass);
}
/**
* Add a GcObserver class (optional)
*
* @param gcObserverClass GcObserver class to add.
*/
public final void addGcObserverClass(
Class<? extends GcObserver> gcObserverClass) {
GC_OBSERVER_CLASSES.add(this, gcObserverClass);
}
/**
* Get job observer class
*
* @return GiraphJobObserver class set.
*/
public Class<? extends GiraphJobObserver> getJobObserverClass() {
return JOB_OBSERVER_CLASS.get(this);
}
/**
* Set job observer class
*
* @param klass GiraphJobObserver class to set.
*/
public void setJobObserverClass(Class<? extends GiraphJobObserver> klass) {
JOB_OBSERVER_CLASS.set(this, klass);
}
/**
* Get job retry checker class
*
* @return GiraphJobRetryChecker class set.
*/
public Class<? extends GiraphJobRetryChecker> getJobRetryCheckerClass() {
return JOB_RETRY_CHECKER_CLASS.get(this);
}
/**
* Set job retry checker class
*
* @param klass GiraphJobRetryChecker class to set.
*/
public void setJobRetryCheckerClass(
Class<? extends GiraphJobRetryChecker> klass) {
JOB_RETRY_CHECKER_CLASS.set(this, klass);
}
/**
* Check whether to enable jmap dumping thread.
*
* @return true if jmap dumper is enabled.
*/
public boolean isJMapHistogramDumpEnabled() {
return JMAP_ENABLE.get(this);
}
/**
* Check whether to enable heap memory supervisor thread
*
* @return true if jmap dumper is reactively enabled
*/
public boolean isReactiveJmapHistogramDumpEnabled() {
return REACTIVE_JMAP_ENABLE.get(this);
}
/**
* Set mapping from a key name to a list of classes.
*
* @param name String key name to use.
* @param xface interface of the classes being set.
* @param klasses Classes to set.
*/
public final void setClasses(String name, Class<?> xface,
Class<?> ... klasses) {
String[] klassNames = new String[klasses.length];
for (int i = 0; i < klasses.length; ++i) {
Class<?> klass = klasses[i];
if (!xface.isAssignableFrom(klass)) {
throw new RuntimeException(klass + " does not implement " +
xface.getName());
}
klassNames[i] = klasses[i].getName();
}
setStrings(name, klassNames);
}
/**
* Does the job have a {@link VertexOutputFormat}?
*
* @return True iff a {@link VertexOutputFormat} has been specified.
*/
public boolean hasVertexOutputFormat() {
return VERTEX_OUTPUT_FORMAT_CLASS.get(this) != null;
}
/**
* Set the vertex output format class (optional)
*
* @param vertexOutputFormatClass Determines how graph is output
*/
public final void setVertexOutputFormatClass(
Class<? extends VertexOutputFormat> vertexOutputFormatClass) {
VERTEX_OUTPUT_FORMAT_CLASS.set(this, vertexOutputFormatClass);
}
/**
* Does the job have a {@link EdgeOutputFormat} subdir?
*
* @return True iff a {@link EdgeOutputFormat} subdir has been specified.
*/
public boolean hasVertexOutputFormatSubdir() {
return !VERTEX_OUTPUT_FORMAT_SUBDIR.get(this).isEmpty();
}
/**
* Set the vertex output format path
*
* @param path path where the verteces will be written
*/
public final void setVertexOutputFormatSubdir(String path) {
VERTEX_OUTPUT_FORMAT_SUBDIR.set(this, path);
}
/**
* Check if output should be done during computation
*
* @return True iff output should be done during computation
*/
public final boolean doOutputDuringComputation() {
return DO_OUTPUT_DURING_COMPUTATION.get(this);
}
/**
* Set whether or not we should do output during computation
*
* @param doOutputDuringComputation True iff we want output to happen
* during computation
*/
public final void setDoOutputDuringComputation(
boolean doOutputDuringComputation) {
DO_OUTPUT_DURING_COMPUTATION.set(this, doOutputDuringComputation);
}
/**
* Check if VertexOutputFormat is thread-safe
*
* @return True iff VertexOutputFormat is thread-safe
*/
public final boolean vertexOutputFormatThreadSafe() {
return VERTEX_OUTPUT_FORMAT_THREAD_SAFE.get(this);
}
/**
* Set whether or not selected VertexOutputFormat is thread-safe
*
* @param vertexOutputFormatThreadSafe True iff selected VertexOutputFormat
* is thread-safe
*/
public final void setVertexOutputFormatThreadSafe(
boolean vertexOutputFormatThreadSafe) {
VERTEX_OUTPUT_FORMAT_THREAD_SAFE.set(this, vertexOutputFormatThreadSafe);
}
/**
* Does the job have a {@link EdgeOutputFormat}?
*
* @return True iff a {@link EdgeOutputFormat} has been specified.
*/
public boolean hasEdgeOutputFormat() {
return EDGE_OUTPUT_FORMAT_CLASS.get(this) != null;
}
/**
* Set the edge output format class (optional)
*
* @param edgeOutputFormatClass Determines how graph is output
*/
public final void setEdgeOutputFormatClass(
Class<? extends EdgeOutputFormat> edgeOutputFormatClass) {
EDGE_OUTPUT_FORMAT_CLASS.set(this, edgeOutputFormatClass);
}
/**
* Does the job have a {@link EdgeOutputFormat} subdir?
*
* @return True iff a {@link EdgeOutputFormat} subdir has been specified.
*/
public boolean hasEdgeOutputFormatSubdir() {
return !EDGE_OUTPUT_FORMAT_SUBDIR.get(this).isEmpty();
}
/**
* Set the edge output format path
*
* @param path path where the edges will be written
*/
public final void setEdgeOutputFormatSubdir(String path) {
EDGE_OUTPUT_FORMAT_SUBDIR.set(this, path);
}
/**
* Get the number of threads to use for writing output in the end of the
* application. If output format is not thread safe, returns 1.
*
* @return Number of output threads
*/
public final int getNumOutputThreads() {
if (!vertexOutputFormatThreadSafe()) {
return 1;
} else {
return NUM_OUTPUT_THREADS.get(this);
}
}
/**
* Set the number of threads to use for writing output in the end of the
* application. Will be used only if {#vertexOutputFormatThreadSafe} is true.
*
* @param numOutputThreads Number of output threads
*/
public void setNumOutputThreads(int numOutputThreads) {
NUM_OUTPUT_THREADS.set(this, numOutputThreads);
}
/**
* Set the message combiner class (optional)
*
* @param messageCombinerClass Determines how vertex messages are combined
*/
public void setMessageCombinerClass(
Class<? extends MessageCombiner> messageCombinerClass) {
MESSAGE_COMBINER_CLASS.set(this, messageCombinerClass);
}
/**
* Set the graph partitioner class (optional)
*
* @param graphPartitionerFactoryClass Determines how the graph is partitioned
*/
public final void setGraphPartitionerFactoryClass(
Class<? extends GraphPartitionerFactory> graphPartitionerFactoryClass) {
GRAPH_PARTITIONER_FACTORY_CLASS.set(this, graphPartitionerFactoryClass);
}
/**
* Set the vertex resolver class (optional)
*
* @param vertexResolverClass Determines how vertex mutations are resolved
*/
public final void setVertexResolverClass(
Class<? extends VertexResolver> vertexResolverClass) {
VERTEX_RESOLVER_CLASS.set(this, vertexResolverClass);
}
/**
* Whether to create a vertex that doesn't exist when it receives messages.
* This only affects DefaultVertexResolver.
*
* @return true if we should create non existent vertices that get messages.
*/
public final boolean getResolverCreateVertexOnMessages() {
return RESOLVER_CREATE_VERTEX_ON_MSGS.get(this);
}
/**
* Set whether to create non existent vertices when they receive messages.
*
* @param v true if we should create vertices when they get messages.
*/
public final void setResolverCreateVertexOnMessages(boolean v) {
RESOLVER_CREATE_VERTEX_ON_MSGS.set(this, v);
}
/**
* Set the vertex value combiner class (optional)
*
* @param vertexValueCombinerClass Determines how vertices are combined
*/
public final void setVertexValueCombinerClass(
Class<? extends VertexValueCombiner> vertexValueCombinerClass) {
VERTEX_VALUE_COMBINER_CLASS.set(this, vertexValueCombinerClass);
}
/**
* Set the worker context class (optional)
*
* @param workerContextClass Determines what code is executed on a each
* worker before and after each superstep and computation
*/
public final void setWorkerContextClass(
Class<? extends WorkerContext> workerContextClass) {
WORKER_CONTEXT_CLASS.set(this, workerContextClass);
}
/**
* Set the aggregator writer class (optional)
*
* @param aggregatorWriterClass Determines how the aggregators are
* written to file at the end of the job
*/
public final void setAggregatorWriterClass(
Class<? extends AggregatorWriter> aggregatorWriterClass) {
AGGREGATOR_WRITER_CLASS.set(this, aggregatorWriterClass);
}
/**
* Set the partition class (optional)
*
* @param partitionClass Determines the why partitions are stored
*/
public final void setPartitionClass(
Class<? extends Partition> partitionClass) {
PARTITION_CLASS.set(this, partitionClass);
}
/**
* Set worker configuration for determining what is required for
* a superstep.
*
* @param minWorkers Minimum workers to do a superstep
* @param maxWorkers Maximum workers to do a superstep
* (max map tasks in job)
* @param minPercentResponded 0 - 100 % of the workers required to
* have responded before continuing the superstep
*/
public final void setWorkerConfiguration(int minWorkers,
int maxWorkers,
float minPercentResponded) {
setInt(MIN_WORKERS, minWorkers);
setInt(MAX_WORKERS, maxWorkers);
MIN_PERCENT_RESPONDED.set(this, minPercentResponded);
}
  /**
   * Get the minimum number of workers required for a superstep.
   *
   * @return Minimum number of workers, or -1 if not set
   */
  public final int getMinWorkers() {
    return getInt(MIN_WORKERS, -1);
  }
  /**
   * Get the maximum number of workers allowed for a superstep.
   *
   * @return Maximum number of workers, or -1 if not set
   */
  public final int getMaxWorkers() {
    return getInt(MAX_WORKERS, -1);
  }
  /**
   * Get the minimum percentage (0 - 100) of workers that must respond
   * before a superstep continues.
   *
   * @return Minimum percent of workers that must respond
   */
  public final float getMinPercentResponded() {
    return MIN_PERCENT_RESPONDED.get(this);
  }
/**
* How many mappers is job asking for, taking into account whether master
* is running on the same mapper as worker or not
*
* @return How many mappers is job asking for
*/
public final int getMaxMappers() {
return getMaxWorkers() + (SPLIT_MASTER_WORKER.get(this) ? 1 : 0);
}
/**
* Utilize an existing ZooKeeper service. If this is not set, ZooKeeper
* will be dynamically started by Giraph for this job.
*
* @param serverList Comma separated list of servers and ports
* (i.e. zk1:2221,zk2:2221)
*/
public final void setZooKeeperConfiguration(String serverList) {
ZOOKEEPER_LIST.set(this, serverList);
}
/**
* Getter for SPLIT_MASTER_WORKER flag.
*
* @return boolean flag value.
*/
public final boolean getSplitMasterWorker() {
return SPLIT_MASTER_WORKER.get(this);
}
/**
* Get array of MasterObserver classes set in the configuration.
*
* @return array of MasterObserver classes.
*/
public Class<? extends MasterObserver>[] getMasterObserverClasses() {
return MASTER_OBSERVER_CLASSES.getArray(this);
}
/**
* Get array of WorkerObserver classes set in configuration.
*
* @return array of WorkerObserver classes.
*/
public Class<? extends WorkerObserver>[] getWorkerObserverClasses() {
return WORKER_OBSERVER_CLASSES.getArray(this);
}
/**
* Get array of MapperObserver classes set in configuration.
*
* @return array of MapperObserver classes.
*/
public Class<? extends MapperObserver>[] getMapperObserverClasses() {
return MAPPER_OBSERVER_CLASSES.getArray(this);
}
/**
* Get array of GcObserver classes set in configuration.
*
* @return array of GcObserver classes.
*/
public Class<? extends GcObserver>[] getGcObserverClasses() {
return GC_OBSERVER_CLASSES.getArray(this);
}
/**
* Whether to track, print, and aggregate metrics.
*
* @return true if metrics are enabled, false otherwise (default)
*/
public boolean metricsEnabled() {
return METRICS_ENABLE.isTrue(this);
}
/**
* Get the task partition
*
* @return The task partition or -1 if not set
*/
public int getTaskPartition() {
return getInt("mapred.task.partition", -1);
}
/**
* Is this a "pure YARN" Giraph job, or is a MapReduce layer (v1 or v2)
* actually managing our cluster nodes, i.e. each task is a Mapper.
*
* @return TRUE if this is a pure YARN job.
*/
public boolean isPureYarnJob() {
return IS_PURE_YARN_JOB.get(this);
}
/**
* Jars required in "Pure YARN" jobs (names only, no paths) should
* be listed here in full, including Giraph framework jar(s).
*
* @return the comma-separated list of jar names for export to cluster.
*/
public String getYarnLibJars() {
return GIRAPH_YARN_LIBJARS.get(this);
}
/**
* Populate jar list for Pure YARN jobs.
*
* @param jarList a comma-separated list of jar names
*/
public void setYarnLibJars(String jarList) {
GIRAPH_YARN_LIBJARS.set(this, jarList);
}
/**
* Get heap size (in MB) for each task in our Giraph job run,
* assuming this job will run on the "pure YARN" profile.
*
* @return the heap size for all tasks, in MB
*/
public int getYarnTaskHeapMb() {
return GIRAPH_YARN_TASK_HEAP_MB.get(this);
}
/**
* Set heap size for Giraph tasks in our job run, assuming
* the job will run on the "pure YARN" profile.
*
* @param heapMb the heap size for all tasks
*/
public void setYarnTaskHeapMb(int heapMb) {
GIRAPH_YARN_TASK_HEAP_MB.set(this, heapMb);
}
/**
* Get the ZooKeeper list.
*
* @return ZooKeeper list of strings, comma separated or null if none set.
*/
public String getZookeeperList() {
return ZOOKEEPER_LIST.get(this);
}
/**
* Set the ZooKeeper list to the provided list. This method is used when the
* ZooKeeper is started internally and will set the zkIsExternal option to
* false as well.
*
* @param zkList list of strings, comma separated of zookeeper servers
*/
public void setZookeeperList(String zkList) {
ZOOKEEPER_LIST.set(this, zkList);
ZOOKEEPER_IS_EXTERNAL.set(this, false);
}
/**
* Was ZooKeeper provided externally?
*
* @return true iff was zookeeper is external
*/
public boolean isZookeeperExternal() {
return ZOOKEEPER_IS_EXTERNAL.get(this);
}
  /**
   * Get the configured log level.
   *
   * <p>NOTE(review): despite the name, this reads {@code LOG_LEVEL}; the
   * method name is kept for backward compatibility with existing callers.
   *
   * @return Configured log level string
   */
  public String getLocalLevel() {
    return LOG_LEVEL.get(this);
  }
/**
* Use the log thread layout option?
*
* @return True if use the log thread layout option, false otherwise
*/
public boolean useLogThreadLayout() {
return LOG_THREAD_LAYOUT.get(this);
}
/**
* is this job run a local test?
*
* @return the test status as recorded in the Configuration
*/
public boolean getLocalTestMode() {
return LOCAL_TEST_MODE.get(this);
}
/**
* Flag this job as a local test run.
*
* @param flag the test status for this job
*/
public void setLocalTestMode(boolean flag) {
LOCAL_TEST_MODE.set(this, flag);
}
  /**
   * Get the configured ZooKeeper session timeout.
   *
   * @return ZooKeeper session timeout setting
   */
  public int getZooKeeperSessionTimeout() {
    return ZOOKEEPER_SESSION_TIMEOUT.get(this);
  }
  /**
   * Get the maximum number of attempts for a ZooKeeper operation.
   *
   * @return Maximum number of ZooKeeper operation attempts
   */
  public int getZookeeperOpsMaxAttempts() {
    return ZOOKEEPER_OPS_MAX_ATTEMPTS.get(this);
  }
  /**
   * Get the wait time between retries of failed ZooKeeper operations.
   *
   * @return Retry wait time in milliseconds
   */
  public int getZookeeperOpsRetryWaitMsecs() {
    return ZOOKEEPER_OPS_RETRY_WAIT_MSECS.get(this);
  }
  /**
   * Whether the netty server should use an execution handler.
   *
   * @return True iff the netty server execution handler is enabled
   */
  public boolean getNettyServerUseExecutionHandler() {
    return NETTY_SERVER_USE_EXECUTION_HANDLER.get(this);
  }
  /**
   * Get the number of netty server threads.
   *
   * @return Number of netty server threads
   */
  public int getNettyServerThreads() {
    return NETTY_SERVER_THREADS.get(this);
  }
  /**
   * Get the number of threads for the netty server execution handler.
   *
   * @return Number of netty server execution threads
   */
  public int getNettyServerExecutionThreads() {
    return NETTY_SERVER_EXECUTION_THREADS.get(this);
  }
/**
* Get the netty server execution concurrency. This depends on whether the
* netty server execution handler exists.
*
* @return Server concurrency
*/
public int getNettyServerExecutionConcurrency() {
if (getNettyServerUseExecutionHandler()) {
return getNettyServerExecutionThreads();
} else {
return getNettyServerThreads();
}
}
  /**
   * Used by netty client and server to create ByteBufAllocator.
   *
   * <p>The allocator is created lazily on first call and cached in
   * {@link #nettyBufferAllocator}. NOTE(review): the lazy initialization is
   * not synchronized — presumably this is only called during single-threaded
   * setup; confirm before calling concurrently.
   *
   * @return ByteBufAllocator (pooled or unpooled per NETTY_USE_POOLED_ALLOCATOR)
   */
  public ByteBufAllocator getNettyAllocator() {
    if (nettyBufferAllocator == null) {
      if (NETTY_USE_POOLED_ALLOCATOR.get(this)) { // Use pooled allocator
        nettyBufferAllocator = new PooledByteBufAllocator(
          NETTY_USE_DIRECT_MEMORY.get(this));
      } else { // Use un-pooled allocator
        // Note: Current default settings create un-pooled heap allocator
        nettyBufferAllocator = new UnpooledByteBufAllocator(
          NETTY_USE_DIRECT_MEMORY.get(this));
      }
    }
    return nettyBufferAllocator;
  }
  /**
   * Get the number of attempts to connect to ZooKeeper.
   *
   * @return Number of ZooKeeper connection attempts
   */
  public int getZookeeperConnectionAttempts() {
    return ZOOKEEPER_CONNECTION_ATTEMPTS.get(this);
  }
  /**
   * Get the minimum ZooKeeper session timeout setting.
   *
   * @return Minimum ZooKeeper session timeout
   */
  public int getZooKeeperMinSessionTimeout() {
    return ZOOKEEPER_MIN_SESSION_TIMEOUT.get(this);
  }
  /**
   * Get the maximum ZooKeeper session timeout setting.
   *
   * @return Maximum ZooKeeper session timeout
   */
  public int getZooKeeperMaxSessionTimeout() {
    return ZOOKEEPER_MAX_SESSION_TIMEOUT.get(this);
  }
/**
* Get the number of map tasks in this job
*
* @return Number of map tasks in this job
*/
public int getMapTasks() {
int mapTasks = getInt("mapred.map.tasks", -1);
if (mapTasks == -1) {
throw new IllegalStateException("getMapTasks: Failed to get the map " +
"tasks!");
}
return mapTasks;
}
/**
* Use authentication? (if supported)
*
* @return True if should authenticate, false otherwise
*/
public boolean authenticate() {
return AUTHENTICATE.get(this);
}
  /**
   * Use SSL encryption? (if supported)
   *
   * <p>NOTE(review): Javadoc previously duplicated {@link #authenticate()};
   * this method reads the {@code SSL_ENCRYPT} option instead.
   *
   * @return True if SSL encryption should be used, false otherwise
   */
  public boolean sslAuthenticate() {
    return SSL_ENCRYPT.get(this);
  }
/**
* Set the number of compute threads
*
* @param numComputeThreads Number of compute threads to use
*/
public void setNumComputeThreads(int numComputeThreads) {
NUM_COMPUTE_THREADS.set(this, numComputeThreads);
}
public int getNumComputeThreads() {
return NUM_COMPUTE_THREADS.get(this);
}
/**
* Set the number of input split threads
*
* @param numInputSplitsThreads Number of input split threads to use
*/
public void setNumInputSplitsThreads(int numInputSplitsThreads) {
NUM_INPUT_THREADS.set(this, numInputSplitsThreads);
}
public int getNumInputSplitsThreads() {
return NUM_INPUT_THREADS.get(this);
}
public long getInputSplitMaxVertices() {
return INPUT_SPLIT_MAX_VERTICES.get(this);
}
public long getInputSplitMaxEdges() {
return INPUT_SPLIT_MAX_EDGES.get(this);
}
/**
* Set whether to use unsafe serialization
*
* @param useUnsafeSerialization If true, use unsafe serialization
*/
public void useUnsafeSerialization(boolean useUnsafeSerialization) {
USE_UNSAFE_SERIALIZATION.set(this, useUnsafeSerialization);
}
/**
* Set the checkpoint frequeuncy of how many supersteps to wait before
* checkpointing
*
* @param checkpointFrequency How often to checkpoint (0 means never)
*/
public void setCheckpointFrequency(int checkpointFrequency) {
CHECKPOINT_FREQUENCY.set(this, checkpointFrequency);
}
/**
* Get the checkpoint frequeuncy of how many supersteps to wait
* before checkpointing
*
* @return Checkpoint frequency (0 means never)
*/
public int getCheckpointFrequency() {
return CHECKPOINT_FREQUENCY.get(this);
}
/**
* Check if checkpointing is used
*
* @return True iff checkpointing is used
*/
public boolean useCheckpointing() {
return getCheckpointFrequency() != 0;
}
/**
* Set runtime checkpoint support checker.
* The instance of this class will have to decide whether
* checkpointing is allowed on current superstep.
*
* @param clazz checkpoint supported checker class
*/
public void setCheckpointSupportedChecker(
Class<? extends CheckpointSupportedChecker> clazz) {
GiraphConstants.CHECKPOINT_SUPPORTED_CHECKER.set(this, clazz);
}
/**
* Set the max task attempts
*
* @param maxTaskAttempts Max task attempts to use
*/
public void setMaxTaskAttempts(int maxTaskAttempts) {
MAX_TASK_ATTEMPTS.set(this, maxTaskAttempts);
}
/**
* Get the max task attempts
*
* @return Max task attempts or -1, if not set
*/
public int getMaxTaskAttempts() {
return MAX_TASK_ATTEMPTS.get(this);
}
/**
* Get the number of milliseconds to wait for an event before continuing on
*
* @return Number of milliseconds to wait for an event before continuing on
*/
public int getEventWaitMsecs() {
return EVENT_WAIT_MSECS.get(this);
}
/**
* Set the number of milliseconds to wait for an event before continuing on
*
* @param eventWaitMsecs Number of milliseconds to wait for an event before
* continuing on
*/
public void setEventWaitMsecs(int eventWaitMsecs) {
EVENT_WAIT_MSECS.set(this, eventWaitMsecs);
}
/**
* Get the maximum milliseconds to wait before giving up trying to get the
* minimum number of workers before a superstep.
*
* @return Maximum milliseconds to wait before giving up trying to get the
* minimum number of workers before a superstep
*/
public int getMaxMasterSuperstepWaitMsecs() {
return MAX_MASTER_SUPERSTEP_WAIT_MSECS.get(this);
}
public int getMaxCounterWaitMsecs() {
return MAX_COUNTER_WAIT_MSECS.get(this);
}
/**
* Set the maximum milliseconds to wait before giving up trying to get the
* minimum number of workers before a superstep.
*
* @param maxMasterSuperstepWaitMsecs Maximum milliseconds to wait before
* giving up trying to get the minimum
* number of workers before a superstep
*/
public void setMaxMasterSuperstepWaitMsecs(int maxMasterSuperstepWaitMsecs) {
MAX_MASTER_SUPERSTEP_WAIT_MSECS.set(this, maxMasterSuperstepWaitMsecs);
}
/**
* Check environment for Hadoop security token location in case we are
* executing the Giraph job on a MRv1 Hadoop cluster.
*/
public void configureHadoopSecurity() {
String hadoopTokenFilePath = System.getenv("HADOOP_TOKEN_FILE_LOCATION");
if (hadoopTokenFilePath != null) {
set("mapreduce.job.credentials.binary", hadoopTokenFilePath);
}
}
  /**
   * Check if we want to prioritize input splits which reside on the host.
   *
   * @return True iff we want to use input split locality
   */
  public boolean useInputSplitLocality() {
    return USE_INPUT_SPLIT_LOCALITY.get(this);
  }
/**
* Get the local hostname on the given interface.
*
* @return The local hostname
* @throws UnknownHostException IP address of a host could not be determined
*/
public String getLocalHostname() throws UnknownHostException {
return DNS.getDefaultHost(
GiraphConstants.DNS_INTERFACE.get(this),
GiraphConstants.DNS_NAMESERVER.get(this)).toLowerCase();
}
/**
* Return local host name by default. Or local host IP if preferIP
* option is set.
* @return local host name or IP
* @throws UnknownHostException IP address of a host could not be determined
*/
public String getLocalHostOrIp() throws UnknownHostException {
if (GiraphConstants.PREFER_IP_ADDRESSES.get(this)) {
return InetAddress.getLocalHost().getHostAddress();
}
return getLocalHostname();
}
  /**
   * Set the maximum number of supersteps of this application. After this
   * many supersteps are executed, the application will shutdown.
   *
   * @param maxNumberOfSupersteps Maximum number of supersteps
   */
  public void setMaxNumberOfSupersteps(int maxNumberOfSupersteps) {
    MAX_NUMBER_OF_SUPERSTEPS.set(this, maxNumberOfSupersteps);
  }
  /**
   * Get the maximum number of supersteps of this application. After this
   * many supersteps are executed, the application will shutdown.
   *
   * @return Maximum number of supersteps
   */
  public int getMaxNumberOfSupersteps() {
    return MAX_NUMBER_OF_SUPERSTEPS.get(this);
  }
/**
* Get the output directory to write YourKit snapshots to
*
* @param context Map context
* @return output directory
*/
public String getYourKitOutputDir(Mapper.Context context) {
final String cacheKey = "giraph.yourkit.outputDirCached";
String outputDir = get(cacheKey);
if (outputDir == null) {
outputDir = getStringVars(YOURKIT_OUTPUT_DIR, YOURKIT_OUTPUT_DIR_DEFAULT,
context);
set(cacheKey, outputDir);
}
return outputDir;
}
  /**
   * Get string, replacing variables in the output.
   *
   * %JOB_ID% => job id
   * %TASK_ID% => task id
   * %USER% => owning user name
   *
   * @param key name of key to lookup
   * @param context mapper context
   * @return value for key with variables expanded, or null if the key is
   *         unset (delegates to the three-argument overload with a null
   *         default)
   */
  public String getStringVars(String key, Mapper.Context context) {
    return getStringVars(key, null, context);
  }
  /**
   * Get string, replacing variables in the output.
   *
   * %JOB_ID% => job id
   * %TASK_ID% => task id
   * %USER% => owning user name
   *
   * @param key name of key to lookup
   * @param defaultValue value to return if no mapping exists. This can also
   *        have variables, which will be substituted.
   * @param context mapper context
   * @return value for key with variables expanded, or null if the key is
   *         unset and defaultValue is null
   */
  public String getStringVars(String key, String defaultValue,
      Mapper.Context context) {
    String value = get(key);
    if (value == null) {
      if (defaultValue == null) {
        return null;
      }
      // Default values get the same variable expansion as stored values.
      value = defaultValue;
    }
    value = value.replace("%JOB_ID%", context.getJobID().toString());
    value = value.replace("%TASK_ID%", context.getTaskAttemptID().toString());
    // NOTE(review): "user.name" is looked up in this Configuration, not in
    // JVM system properties — confirm it is actually populated here,
    // otherwise %USER% always expands to "unknown_user".
    value = value.replace("%USER%", get("user.name", "unknown_user"));
    return value;
  }
  /**
   * Get option whether to create a source vertex present only in edge input.
   *
   * @return CREATE_EDGE_SOURCE_VERTICES option value
   */
  public boolean getCreateSourceVertex() {
    return CREATE_EDGE_SOURCE_VERTICES.get(this);
  }
  /**
   * Set option whether to create a source vertex present only in edge input.
   *
   * @param createVertex create source vertex option
   */
  public void setCreateSourceVertex(boolean createVertex) {
    CREATE_EDGE_SOURCE_VERTICES.set(this, createVertex);
  }
  /**
   * Get the maximum timeout (in milliseconds) for waiting for all tasks
   * to complete after the job is done.
   *
   * @return Wait task done timeout in milliseconds.
   */
  public int getWaitTaskDoneTimeoutMs() {
    return WAIT_TASK_DONE_TIMEOUT_MS.get(this);
  }
  /**
   * Set the maximum timeout (in milliseconds) for waiting for all tasks
   * to complete after the job is done.
   *
   * @param ms Milliseconds to set
   */
  public void setWaitTaskDoneTimeoutMs(int ms) {
    WAIT_TASK_DONE_TIMEOUT_MS.set(this, ms);
  }
  /**
   * Check whether to track job progress on client or not.
   *
   * @return True if job progress should be tracked on client
   */
  public boolean trackJobProgressOnClient() {
    return TRACK_JOB_PROGRESS_ON_CLIENT.get(this);
  }
  /**
   * Get the retry budget for HDFS file creation.
   *
   * @return Number of retries when creating an HDFS file before failing.
   */
  public int getHdfsFileCreationRetries() {
    return HDFS_FILE_CREATION_RETRIES.get(this);
  }
  /**
   * Get the back-off between HDFS file creation retries.
   *
   * @return Milliseconds to wait before retrying an HDFS file creation
   *         operation.
   */
  public int getHdfsFileCreationRetryWaitMs() {
    return HDFS_FILE_CREATION_RETRY_WAIT_MS.get(this);
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.example.library.v1;
import static com.google.cloud.example.library.v1.LibraryServiceClient.ListBooksPagedResponse;
import static com.google.cloud.example.library.v1.LibraryServiceClient.ListShelvesPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.common.collect.Lists;
import com.google.example.library.v1.Book;
import com.google.example.library.v1.BookName;
import com.google.example.library.v1.CreateBookRequest;
import com.google.example.library.v1.CreateShelfRequest;
import com.google.example.library.v1.DeleteBookRequest;
import com.google.example.library.v1.DeleteShelfRequest;
import com.google.example.library.v1.GetBookRequest;
import com.google.example.library.v1.GetShelfRequest;
import com.google.example.library.v1.ListBooksRequest;
import com.google.example.library.v1.ListBooksResponse;
import com.google.example.library.v1.ListShelvesRequest;
import com.google.example.library.v1.ListShelvesResponse;
import com.google.example.library.v1.MergeShelvesRequest;
import com.google.example.library.v1.MoveBookRequest;
import com.google.example.library.v1.Shelf;
import com.google.example.library.v1.ShelfName;
import com.google.example.library.v1.UpdateBookRequest;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class LibraryServiceClientTest {
private static MockLibraryService mockLibraryService;
private static MockServiceHelper mockServiceHelper;
private LocalChannelProvider channelProvider;
private LibraryServiceClient client;
  /** Starts one shared in-process mock gRPC server for the whole test class. */
  @BeforeClass
  public static void startStaticServer() {
    mockLibraryService = new MockLibraryService();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockLibraryService));
    mockServiceHelper.start();
  }
  /** Stops the shared mock server after all tests have run. */
  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }
  /** Resets the mock service and creates a fresh client bound to the local channel. */
  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    LibraryServiceSettings settings =
        LibraryServiceSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = LibraryServiceClient.create(settings);
  }
  /** Closes the client created in {@code setUp}. */
  @After
  public void tearDown() throws Exception {
    client.close();
  }
  /** createShelf should forward the shelf in the request and return the mocked response. */
  @Test
  public void createShelfTest() throws Exception {
    Shelf expectedResponse =
        Shelf.newBuilder()
            .setName(ShelfName.of("[SHELF_ID]").toString())
            .setTheme("theme110327241")
            .build();
    mockLibraryService.addResponse(expectedResponse);
    Shelf shelf = Shelf.newBuilder().build();
    Shelf actualResponse = client.createShelf(shelf);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateShelfRequest actualRequest = ((CreateShelfRequest) actualRequests.get(0));
    Assert.assertEquals(shelf, actualRequest.getShelf());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** createShelf should surface INVALID_ARGUMENT as InvalidArgumentException. */
  @Test
  public void createShelfExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      Shelf shelf = Shelf.newBuilder().build();
      client.createShelf(shelf);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** getShelf(ShelfName) should send the formatted name and return the mocked shelf. */
  @Test
  public void getShelfTest() throws Exception {
    Shelf expectedResponse =
        Shelf.newBuilder()
            .setName(ShelfName.of("[SHELF_ID]").toString())
            .setTheme("theme110327241")
            .build();
    mockLibraryService.addResponse(expectedResponse);
    ShelfName name = ShelfName.of("[SHELF_ID]");
    Shelf actualResponse = client.getShelf(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetShelfRequest actualRequest = ((GetShelfRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** getShelf(ShelfName) should surface INVALID_ARGUMENT as InvalidArgumentException. */
  @Test
  public void getShelfExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      ShelfName name = ShelfName.of("[SHELF_ID]");
      client.getShelf(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** getShelf(String) overload: sends the raw name string unchanged. */
  @Test
  public void getShelfTest2() throws Exception {
    Shelf expectedResponse =
        Shelf.newBuilder()
            .setName(ShelfName.of("[SHELF_ID]").toString())
            .setTheme("theme110327241")
            .build();
    mockLibraryService.addResponse(expectedResponse);
    String name = "name3373707";
    Shelf actualResponse = client.getShelf(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetShelfRequest actualRequest = ((GetShelfRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** getShelf(String) should surface INVALID_ARGUMENT as InvalidArgumentException. */
  @Test
  public void getShelfExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      String name = "name3373707";
      client.getShelf(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** listShelves should page through a single-element response and echo paging params. */
  @Test
  public void listShelvesTest() throws Exception {
    Shelf responsesElement = Shelf.newBuilder().build();
    ListShelvesResponse expectedResponse =
        ListShelvesResponse.newBuilder()
            .setNextPageToken("")
            .addAllShelves(Arrays.asList(responsesElement))
            .build();
    mockLibraryService.addResponse(expectedResponse);
    ListShelvesRequest request =
        ListShelvesRequest.newBuilder()
            .setPageSize(883849137)
            .setPageToken("pageToken873572522")
            .build();
    ListShelvesPagedResponse pagedListResponse = client.listShelves(request);
    List<Shelf> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getShelvesList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListShelvesRequest actualRequest = ((ListShelvesRequest) actualRequests.get(0));
    Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
    Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** listShelves should surface INVALID_ARGUMENT as InvalidArgumentException. */
  @Test
  public void listShelvesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      ListShelvesRequest request =
          ListShelvesRequest.newBuilder()
              .setPageSize(883849137)
              .setPageToken("pageToken873572522")
              .build();
      client.listShelves(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** deleteShelf(ShelfName) should send the formatted name; response is Empty. */
  @Test
  public void deleteShelfTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockLibraryService.addResponse(expectedResponse);
    ShelfName name = ShelfName.of("[SHELF_ID]");
    client.deleteShelf(name);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteShelfRequest actualRequest = ((DeleteShelfRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** deleteShelf(ShelfName) should surface INVALID_ARGUMENT as InvalidArgumentException. */
  @Test
  public void deleteShelfExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      ShelfName name = ShelfName.of("[SHELF_ID]");
      client.deleteShelf(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** deleteShelf(String) overload: sends the raw name string unchanged. */
  @Test
  public void deleteShelfTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockLibraryService.addResponse(expectedResponse);
    String name = "name3373707";
    client.deleteShelf(name);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteShelfRequest actualRequest = ((DeleteShelfRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** deleteShelf(String) should surface INVALID_ARGUMENT as InvalidArgumentException. */
  @Test
  public void deleteShelfExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      String name = "name3373707";
      client.deleteShelf(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** mergeShelves(ShelfName, ShelfName): both names sent in formatted form. */
  @Test
  public void mergeShelvesTest() throws Exception {
    Shelf expectedResponse =
        Shelf.newBuilder()
            .setName(ShelfName.of("[SHELF_ID]").toString())
            .setTheme("theme110327241")
            .build();
    mockLibraryService.addResponse(expectedResponse);
    ShelfName name = ShelfName.of("[SHELF_ID]");
    ShelfName otherShelf = ShelfName.of("[SHELF_ID]");
    Shelf actualResponse = client.mergeShelves(name, otherShelf);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    MergeShelvesRequest actualRequest = ((MergeShelvesRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertEquals(otherShelf.toString(), actualRequest.getOtherShelf());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** mergeShelves(ShelfName, ShelfName) should surface INVALID_ARGUMENT. */
  @Test
  public void mergeShelvesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      ShelfName name = ShelfName.of("[SHELF_ID]");
      ShelfName otherShelf = ShelfName.of("[SHELF_ID]");
      client.mergeShelves(name, otherShelf);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** mergeShelves(ShelfName, String): mixed-overload variant. */
  @Test
  public void mergeShelvesTest2() throws Exception {
    Shelf expectedResponse =
        Shelf.newBuilder()
            .setName(ShelfName.of("[SHELF_ID]").toString())
            .setTheme("theme110327241")
            .build();
    mockLibraryService.addResponse(expectedResponse);
    ShelfName name = ShelfName.of("[SHELF_ID]");
    String otherShelf = "otherShelf-193668870";
    Shelf actualResponse = client.mergeShelves(name, otherShelf);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    MergeShelvesRequest actualRequest = ((MergeShelvesRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertEquals(otherShelf, actualRequest.getOtherShelf());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** mergeShelves(ShelfName, String) should surface INVALID_ARGUMENT. */
  @Test
  public void mergeShelvesExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      ShelfName name = ShelfName.of("[SHELF_ID]");
      String otherShelf = "otherShelf-193668870";
      client.mergeShelves(name, otherShelf);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** mergeShelves(String, ShelfName): mixed-overload variant. */
  @Test
  public void mergeShelvesTest3() throws Exception {
    Shelf expectedResponse =
        Shelf.newBuilder()
            .setName(ShelfName.of("[SHELF_ID]").toString())
            .setTheme("theme110327241")
            .build();
    mockLibraryService.addResponse(expectedResponse);
    String name = "name3373707";
    ShelfName otherShelf = ShelfName.of("[SHELF_ID]");
    Shelf actualResponse = client.mergeShelves(name, otherShelf);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    MergeShelvesRequest actualRequest = ((MergeShelvesRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertEquals(otherShelf.toString(), actualRequest.getOtherShelf());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** mergeShelves(String, ShelfName) should surface INVALID_ARGUMENT. */
  @Test
  public void mergeShelvesExceptionTest3() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      String name = "name3373707";
      ShelfName otherShelf = ShelfName.of("[SHELF_ID]");
      client.mergeShelves(name, otherShelf);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** mergeShelves(String, String): raw-string overload variant. */
  @Test
  public void mergeShelvesTest4() throws Exception {
    Shelf expectedResponse =
        Shelf.newBuilder()
            .setName(ShelfName.of("[SHELF_ID]").toString())
            .setTheme("theme110327241")
            .build();
    mockLibraryService.addResponse(expectedResponse);
    String name = "name3373707";
    String otherShelf = "otherShelf-193668870";
    Shelf actualResponse = client.mergeShelves(name, otherShelf);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    MergeShelvesRequest actualRequest = ((MergeShelvesRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertEquals(otherShelf, actualRequest.getOtherShelf());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** mergeShelves(String, String) should surface INVALID_ARGUMENT. */
  @Test
  public void mergeShelvesExceptionTest4() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      String name = "name3373707";
      String otherShelf = "otherShelf-193668870";
      client.mergeShelves(name, otherShelf);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** createBook(ShelfName, Book): parent sent formatted, book sent unchanged. */
  @Test
  public void createBookTest() throws Exception {
    Book expectedResponse =
        Book.newBuilder()
            .setName(BookName.of("[SHELF]", "[BOOK]").toString())
            .setAuthor("author-1406328437")
            .setTitle("title110371416")
            .setRead(true)
            .build();
    mockLibraryService.addResponse(expectedResponse);
    ShelfName parent = ShelfName.of("[SHELF_ID]");
    Book book = Book.newBuilder().build();
    Book actualResponse = client.createBook(parent, book);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateBookRequest actualRequest = ((CreateBookRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(book, actualRequest.getBook());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** createBook(ShelfName, Book) should surface INVALID_ARGUMENT. */
  @Test
  public void createBookExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      ShelfName parent = ShelfName.of("[SHELF_ID]");
      Book book = Book.newBuilder().build();
      client.createBook(parent, book);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** createBook(String, Book): raw-string parent overload. */
  @Test
  public void createBookTest2() throws Exception {
    Book expectedResponse =
        Book.newBuilder()
            .setName(BookName.of("[SHELF]", "[BOOK]").toString())
            .setAuthor("author-1406328437")
            .setTitle("title110371416")
            .setRead(true)
            .build();
    mockLibraryService.addResponse(expectedResponse);
    String parent = "parent-995424086";
    Book book = Book.newBuilder().build();
    Book actualResponse = client.createBook(parent, book);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateBookRequest actualRequest = ((CreateBookRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(book, actualRequest.getBook());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** createBook(String, Book) should surface INVALID_ARGUMENT. */
  @Test
  public void createBookExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      String parent = "parent-995424086";
      Book book = Book.newBuilder().build();
      client.createBook(parent, book);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** getBook(BookName) should send the formatted name and return the mocked book. */
  @Test
  public void getBookTest() throws Exception {
    Book expectedResponse =
        Book.newBuilder()
            .setName(BookName.of("[SHELF]", "[BOOK]").toString())
            .setAuthor("author-1406328437")
            .setTitle("title110371416")
            .setRead(true)
            .build();
    mockLibraryService.addResponse(expectedResponse);
    BookName name = BookName.of("[SHELF]", "[BOOK]");
    Book actualResponse = client.getBook(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetBookRequest actualRequest = ((GetBookRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** getBook(BookName) should surface INVALID_ARGUMENT as InvalidArgumentException. */
  @Test
  public void getBookExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      BookName name = BookName.of("[SHELF]", "[BOOK]");
      client.getBook(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** getBook(String) overload: sends the raw name string unchanged. */
  @Test
  public void getBookTest2() throws Exception {
    Book expectedResponse =
        Book.newBuilder()
            .setName(BookName.of("[SHELF]", "[BOOK]").toString())
            .setAuthor("author-1406328437")
            .setTitle("title110371416")
            .setRead(true)
            .build();
    mockLibraryService.addResponse(expectedResponse);
    String name = "name3373707";
    Book actualResponse = client.getBook(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetBookRequest actualRequest = ((GetBookRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** getBook(String) should surface INVALID_ARGUMENT as InvalidArgumentException. */
  @Test
  public void getBookExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      String name = "name3373707";
      client.getBook(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** listBooks(ShelfName) should page through one book and send the formatted parent. */
  @Test
  public void listBooksTest() throws Exception {
    Book responsesElement = Book.newBuilder().build();
    ListBooksResponse expectedResponse =
        ListBooksResponse.newBuilder()
            .setNextPageToken("")
            .addAllBooks(Arrays.asList(responsesElement))
            .build();
    mockLibraryService.addResponse(expectedResponse);
    ShelfName parent = ShelfName.of("[SHELF_ID]");
    ListBooksPagedResponse pagedListResponse = client.listBooks(parent);
    List<Book> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getBooksList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListBooksRequest actualRequest = ((ListBooksRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** listBooks(ShelfName) should surface INVALID_ARGUMENT. */
  @Test
  public void listBooksExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      ShelfName parent = ShelfName.of("[SHELF_ID]");
      client.listBooks(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** listBooks(String) overload: sends the raw parent string unchanged. */
  @Test
  public void listBooksTest2() throws Exception {
    Book responsesElement = Book.newBuilder().build();
    ListBooksResponse expectedResponse =
        ListBooksResponse.newBuilder()
            .setNextPageToken("")
            .addAllBooks(Arrays.asList(responsesElement))
            .build();
    mockLibraryService.addResponse(expectedResponse);
    String parent = "parent-995424086";
    ListBooksPagedResponse pagedListResponse = client.listBooks(parent);
    List<Book> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getBooksList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListBooksRequest actualRequest = ((ListBooksRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** listBooks(String) should surface INVALID_ARGUMENT. */
  @Test
  public void listBooksExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      String parent = "parent-995424086";
      client.listBooks(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** deleteBook(BookName) should send the formatted name; response is Empty. */
  @Test
  public void deleteBookTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockLibraryService.addResponse(expectedResponse);
    BookName name = BookName.of("[SHELF]", "[BOOK]");
    client.deleteBook(name);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteBookRequest actualRequest = ((DeleteBookRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** deleteBook(BookName) should surface INVALID_ARGUMENT. */
  @Test
  public void deleteBookExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      BookName name = BookName.of("[SHELF]", "[BOOK]");
      client.deleteBook(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** deleteBook(String) overload: sends the raw name string unchanged. */
  @Test
  public void deleteBookTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockLibraryService.addResponse(expectedResponse);
    String name = "name3373707";
    client.deleteBook(name);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteBookRequest actualRequest = ((DeleteBookRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** deleteBook(String) should surface INVALID_ARGUMENT. */
  @Test
  public void deleteBookExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLibraryService.addException(exception);
    try {
      String name = "name3373707";
      client.deleteBook(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** updateBook should forward both the book and the field mask unchanged. */
  @Test
  public void updateBookTest() throws Exception {
    Book expectedResponse =
        Book.newBuilder()
            .setName(BookName.of("[SHELF]", "[BOOK]").toString())
            .setAuthor("author-1406328437")
            .setTitle("title110371416")
            .setRead(true)
            .build();
    mockLibraryService.addResponse(expectedResponse);
    Book book = Book.newBuilder().build();
    FieldMask updateMask = FieldMask.newBuilder().build();
    Book actualResponse = client.updateBook(book, updateMask);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    UpdateBookRequest actualRequest = ((UpdateBookRequest) actualRequests.get(0));
    Assert.assertEquals(book, actualRequest.getBook());
    Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void updateBookExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockLibraryService.addException(exception);
try {
Book book = Book.newBuilder().build();
FieldMask updateMask = FieldMask.newBuilder().build();
client.updateBook(book, updateMask);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void moveBookTest() throws Exception {
Book expectedResponse =
Book.newBuilder()
.setName(BookName.of("[SHELF]", "[BOOK]").toString())
.setAuthor("author-1406328437")
.setTitle("title110371416")
.setRead(true)
.build();
mockLibraryService.addResponse(expectedResponse);
BookName name = BookName.of("[SHELF]", "[BOOK]");
ShelfName otherShelfName = ShelfName.of("[SHELF_ID]");
Book actualResponse = client.moveBook(name, otherShelfName);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
Assert.assertEquals(1, actualRequests.size());
MoveBookRequest actualRequest = ((MoveBookRequest) actualRequests.get(0));
Assert.assertEquals(name.toString(), actualRequest.getName());
Assert.assertEquals(otherShelfName.toString(), actualRequest.getOtherShelfName());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void moveBookExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockLibraryService.addException(exception);
try {
BookName name = BookName.of("[SHELF]", "[BOOK]");
ShelfName otherShelfName = ShelfName.of("[SHELF_ID]");
client.moveBook(name, otherShelfName);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void moveBookTest2() throws Exception {
Book expectedResponse =
Book.newBuilder()
.setName(BookName.of("[SHELF]", "[BOOK]").toString())
.setAuthor("author-1406328437")
.setTitle("title110371416")
.setRead(true)
.build();
mockLibraryService.addResponse(expectedResponse);
BookName name = BookName.of("[SHELF]", "[BOOK]");
String otherShelfName = "otherShelfName-1942963547";
Book actualResponse = client.moveBook(name, otherShelfName);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
Assert.assertEquals(1, actualRequests.size());
MoveBookRequest actualRequest = ((MoveBookRequest) actualRequests.get(0));
Assert.assertEquals(name.toString(), actualRequest.getName());
Assert.assertEquals(otherShelfName, actualRequest.getOtherShelfName());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void moveBookExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockLibraryService.addException(exception);
try {
BookName name = BookName.of("[SHELF]", "[BOOK]");
String otherShelfName = "otherShelfName-1942963547";
client.moveBook(name, otherShelfName);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void moveBookTest3() throws Exception {
Book expectedResponse =
Book.newBuilder()
.setName(BookName.of("[SHELF]", "[BOOK]").toString())
.setAuthor("author-1406328437")
.setTitle("title110371416")
.setRead(true)
.build();
mockLibraryService.addResponse(expectedResponse);
String name = "name3373707";
ShelfName otherShelfName = ShelfName.of("[SHELF_ID]");
Book actualResponse = client.moveBook(name, otherShelfName);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
Assert.assertEquals(1, actualRequests.size());
MoveBookRequest actualRequest = ((MoveBookRequest) actualRequests.get(0));
Assert.assertEquals(name, actualRequest.getName());
Assert.assertEquals(otherShelfName.toString(), actualRequest.getOtherShelfName());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void moveBookExceptionTest3() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockLibraryService.addException(exception);
try {
String name = "name3373707";
ShelfName otherShelfName = ShelfName.of("[SHELF_ID]");
client.moveBook(name, otherShelfName);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void moveBookTest4() throws Exception {
Book expectedResponse =
Book.newBuilder()
.setName(BookName.of("[SHELF]", "[BOOK]").toString())
.setAuthor("author-1406328437")
.setTitle("title110371416")
.setRead(true)
.build();
mockLibraryService.addResponse(expectedResponse);
String name = "name3373707";
String otherShelfName = "otherShelfName-1942963547";
Book actualResponse = client.moveBook(name, otherShelfName);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockLibraryService.getRequests();
Assert.assertEquals(1, actualRequests.size());
MoveBookRequest actualRequest = ((MoveBookRequest) actualRequests.get(0));
Assert.assertEquals(name, actualRequest.getName());
Assert.assertEquals(otherShelfName, actualRequest.getOtherShelfName());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void moveBookExceptionTest4() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockLibraryService.addException(exception);
try {
String name = "name3373707";
String otherShelfName = "otherShelfName-1942963547";
client.moveBook(name, otherShelfName);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
}
|
googleapis/google-cloud-java | 38,360 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/InstancesStartWithEncryptionKeyRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest}
*/
public final class InstancesStartWithEncryptionKeyRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest)
InstancesStartWithEncryptionKeyRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use InstancesStartWithEncryptionKeyRequest.newBuilder() to construct.
private InstancesStartWithEncryptionKeyRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private InstancesStartWithEncryptionKeyRequest() {
disks_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new InstancesStartWithEncryptionKeyRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InstancesStartWithEncryptionKeyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InstancesStartWithEncryptionKeyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest.class,
com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest.Builder.class);
}
public static final int DISKS_FIELD_NUMBER = 95594102;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk> disks_;
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk>
getDisksList() {
return disks_;
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
@java.lang.Override
public java.util.List<
? extends com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDiskOrBuilder>
getDisksOrBuilderList() {
return disks_;
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
@java.lang.Override
public int getDisksCount() {
return disks_.size();
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk getDisks(int index) {
return disks_.get(index);
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDiskOrBuilder getDisksOrBuilder(
int index) {
return disks_.get(index);
}
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Cached tri-state flag: -1 = not yet computed, 0 = not initialized, 1 = initialized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message declares no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Emit each protected-disk entry as a length-delimited message under field
    // number 95594102, then replay any unknown fields retained from parsing.
    for (int i = 0; i < disks_.size(); i++) {
      output.writeMessage(95594102, disks_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the wire size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Sum tag + length-prefixed size of every disks entry.
    for (int i = 0; i < disks_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(95594102, disks_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    // A different message type is delegated to the superclass comparison.
    if (!(obj instanceof com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest other =
        (com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest) obj;
    // Field-by-field equality: the repeated disks field plus unknown fields.
    if (!getDisksList().equals(other.getDisksList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel for the cached hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Mix in the disks field only when non-empty, mirroring equals().
    if (getDisksCount() > 0) {
      hash = (37 * hash) + DISKS_FIELD_NUMBER;
      hash = (53 * hash) + getDisksList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest)
com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InstancesStartWithEncryptionKeyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InstancesStartWithEncryptionKeyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest.class,
com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest.Builder.class);
}
// Construct using
// com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (disksBuilder_ == null) {
        // Message-based path: fall back to the shared immutable empty list.
        disks_ = java.util.Collections.emptyList();
      } else {
        // Builder-based path: the nested field builder owns the data; clear it.
        disks_ = null;
        disksBuilder_.clear();
      }
      // Drop the "disks_ is privately mutable" ownership bit.
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_InstancesStartWithEncryptionKeyRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest
getDefaultInstanceForType() {
return com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest build() {
com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    @java.lang.Override
    public com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest buildPartial() {
      com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest result =
          new com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest(this);
      // Repeated fields are transferred separately from bitField-tracked singular fields.
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated disks field into the message under construction.
    private void buildPartialRepeatedFields(
        com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest result) {
      if (disksBuilder_ == null) {
        // Freeze the list exactly once and clear the ownership bit so later
        // Builder mutations copy-on-write instead of aliasing the built message.
        if (((bitField0_ & 0x00000001) != 0)) {
          disks_ = java.util.Collections.unmodifiableList(disks_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.disks_ = disks_;
      } else {
        result.disks_ = disksBuilder_.build();
      }
    }
private void buildPartial0(
com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest) {
return mergeFrom(
(com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges all populated fields of another request into this builder.
    public Builder mergeFrom(
        com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest other) {
      // Merging the default instance is a no-op.
      if (other
          == com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest
              .getDefaultInstance()) return this;
      if (disksBuilder_ == null) {
        if (!other.disks_.isEmpty()) {
          if (disks_.isEmpty()) {
            // Share other's (immutable) list directly; clear the ownership bit.
            disks_ = other.disks_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureDisksIsMutable();
            disks_.addAll(other.disks_);
          }
          onChanged();
        }
      } else {
        if (!other.disks_.isEmpty()) {
          if (disksBuilder_.isEmpty()) {
            // Adopt other's list wholesale and lazily rebuild the field builder.
            disksBuilder_.dispose();
            disksBuilder_ = null;
            disks_ = other.disks_;
            bitField0_ = (bitField0_ & ~0x00000001);
            disksBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getDisksFieldBuilder()
                    : null;
          } else {
            disksBuilder_.addAllMessages(other.disks_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // 764752818 == (95594102 << 3) | 2: the disks field, length-delimited wire type.
            case 764752818:
              {
                com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk m =
                    input.readMessage(
                        com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk.parser(),
                        extensionRegistry);
                if (disksBuilder_ == null) {
                  ensureDisksIsMutable();
                  disks_.add(m);
                } else {
                  disksBuilder_.addMessage(m);
                }
                break;
              } // case 764752818
            default:
              {
                // Unknown fields are preserved rather than dropped.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.util.List<com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk> disks_ =
java.util.Collections.emptyList();
    // Copy-on-write guard: bit 0 of bitField0_ records whether disks_ is a private
    // mutable ArrayList (set) or a shared/immutable list (clear).
    private void ensureDisksIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        disks_ =
            new java.util.ArrayList<com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk>(
                disks_);
        bitField0_ |= 0x00000001;
      }
    }
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk,
com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk.Builder,
com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDiskOrBuilder>
disksBuilder_;
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
public java.util.List<com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk>
getDisksList() {
if (disksBuilder_ == null) {
return java.util.Collections.unmodifiableList(disks_);
} else {
return disksBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
public int getDisksCount() {
if (disksBuilder_ == null) {
return disks_.size();
} else {
return disksBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
public com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk getDisks(int index) {
if (disksBuilder_ == null) {
return disks_.get(index);
} else {
return disksBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
public Builder setDisks(
int index, com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk value) {
if (disksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDisksIsMutable();
disks_.set(index, value);
onChanged();
} else {
disksBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
public Builder setDisks(
int index,
com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk.Builder builderForValue) {
if (disksBuilder_ == null) {
ensureDisksIsMutable();
disks_.set(index, builderForValue.build());
onChanged();
} else {
disksBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
public Builder addDisks(com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk value) {
if (disksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDisksIsMutable();
disks_.add(value);
onChanged();
} else {
disksBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
public Builder addDisks(
int index, com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk value) {
if (disksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDisksIsMutable();
disks_.add(index, value);
onChanged();
} else {
disksBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
public Builder addDisks(
com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk.Builder builderForValue) {
if (disksBuilder_ == null) {
ensureDisksIsMutable();
disks_.add(builderForValue.build());
onChanged();
} else {
disksBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
public Builder addDisks(
int index,
com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk.Builder builderForValue) {
if (disksBuilder_ == null) {
ensureDisksIsMutable();
disks_.add(index, builderForValue.build());
onChanged();
} else {
disksBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
public Builder addAllDisks(
java.lang.Iterable<? extends com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk>
values) {
if (disksBuilder_ == null) {
ensureDisksIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, disks_);
onChanged();
} else {
disksBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Array of disks associated with this instance that are protected with a customer-supplied encryption key. In order to start the instance, the disk url and its corresponding key must be provided. If the disk is not protected with a customer-supplied encryption key it should not be specified.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
* </code>
*/
public Builder clearDisks() {
if (disksBuilder_ == null) {
disks_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
disksBuilder_.clear();
}
return this;
}
    /**
     * Removes the element at {@code index} from the repeated {@code disks} field.
     *
     * <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
     * </code>
     */
    public Builder removeDisks(int index) {
      if (disksBuilder_ == null) {
        ensureDisksIsMutable();
        disks_.remove(index);
        onChanged();
      } else {
        disksBuilder_.remove(index);
      }
      return this;
    }
    /**
     * Returns a builder for the {@code disks} element at {@code index}.
     *
     * <p>Note: forces creation of the nested field builder (see
     * {@code getDisksFieldBuilder()}), switching this Builder into delegated mode.
     *
     * <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
     * </code>
     */
    public com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk.Builder getDisksBuilder(
        int index) {
      return getDisksFieldBuilder().getBuilder(index);
    }
    /**
     * Returns a read-only view of the {@code disks} element at {@code index}.
     *
     * <p>Unlike {@code getDisksBuilder}, this does not force creation of the nested
     * field builder.
     *
     * <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
     * </code>
     */
    public com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDiskOrBuilder
        getDisksOrBuilder(int index) {
      if (disksBuilder_ == null) {
        return disks_.get(index);
      } else {
        return disksBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * Returns a read-only list view of the {@code disks} field.
     *
     * <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
     * </code>
     */
    public java.util.List<
            ? extends com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDiskOrBuilder>
        getDisksOrBuilderList() {
      if (disksBuilder_ != null) {
        return disksBuilder_.getMessageOrBuilderList();
      } else {
        // Wrap the inline list so callers cannot mutate builder state through it.
        return java.util.Collections.unmodifiableList(disks_);
      }
    }
    /**
     * Appends a default-valued {@code disks} element and returns its builder.
     *
     * <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
     * </code>
     */
    public com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk.Builder
        addDisksBuilder() {
      return getDisksFieldBuilder()
          .addBuilder(
              com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk.getDefaultInstance());
    }
    /**
     * Inserts a default-valued {@code disks} element at {@code index} and returns its
     * builder.
     *
     * <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
     * </code>
     */
    public com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk.Builder addDisksBuilder(
        int index) {
      return getDisksFieldBuilder()
          .addBuilder(
              index,
              com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk.getDefaultInstance());
    }
    /**
     * Returns builders for all elements of the {@code disks} field.
     *
     * <code>repeated .google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk disks = 95594102;
     * </code>
     */
    public java.util.List<com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk.Builder>
        getDisksBuilderList() {
      return getDisksFieldBuilder().getBuilderList();
    }
    // Lazily creates the nested RepeatedFieldBuilderV3 for 'disks'. After creation,
    // ownership of the element data transfers to the builder and the inline list
    // reference is nulled; all subsequent accessors delegate to disksBuilder_.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk,
            com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk.Builder,
            com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDiskOrBuilder>
        getDisksFieldBuilder() {
      if (disksBuilder_ == null) {
        disksBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk,
                com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDisk.Builder,
                com.google.cloud.compute.v1.CustomerEncryptionKeyProtectedDiskOrBuilder>(
                disks_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        disks_ = null;
      }
      return disksBuilder_;
    }
    /** Replaces the unknown-field set; delegates to the superclass implementation. */
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    /** Merges the given unknown fields into this builder; delegates to the superclass. */
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest)
  // Singleton default (all-fields-unset) instance, created eagerly at class load.
  private static final com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest();
  }
  /** Returns the shared immutable default instance of this message. */
  public static com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that delegates to Builder.mergeFrom and, on any failure, attaches the
  // partially parsed message to the thrown InvalidProtocolBufferException so callers
  // can inspect what was decoded before the error.
  private static final com.google.protobuf.Parser<InstancesStartWithEncryptionKeyRequest> PARSER =
      new com.google.protobuf.AbstractParser<InstancesStartWithEncryptionKeyRequest>() {
        @java.lang.Override
        public InstancesStartWithEncryptionKeyRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors in the protobuf exception type expected by callers.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the static parser for this message type. */
  public static com.google.protobuf.Parser<InstancesStartWithEncryptionKeyRequest> parser() {
    return PARSER;
  }
  /** Returns the parser for this message type (same instance as {@link #parser()}). */
  @java.lang.Override
  public com.google.protobuf.Parser<InstancesStartWithEncryptionKeyRequest> getParserForType() {
    return PARSER;
  }
  /** Returns the shared default instance (same as {@link #getDefaultInstance()}). */
  @java.lang.Override
  public com.google.cloud.compute.v1.InstancesStartWithEncryptionKeyRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== apache/jena | 38,353 | jena-core/src/main/java/org/apache/jena/ext/xerces/impl/dv/xs/XSSimpleTypeDecl.java ====
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.ext.xerces.impl.dv.xs;
import java.util.Vector;
import java.util.regex.Pattern;
import org.apache.jena.ext.xerces.impl.dv.*;
import org.apache.jena.ext.xerces.xs.*;
import org.apache.jena.ext.xerces.util.XercesXMLChar;
/**
* {@literal @xerces.internal}
*
* @author Sandy Gao, IBM
* @author Neeraj Bajaj, Sun Microsystems, inc.
*
* @version $Id: XSSimpleTypeDecl.java 1026362 2010-10-22 15:15:18Z sandygao $
*/
@SuppressWarnings({"unchecked", "rawtypes"})
public class XSSimpleTypeDecl implements XSSimpleType {
    // Validation DV codes. The first group aliases the PRIMITIVE_* constants from
    // XSSimpleType; the second group extends the code space for derived/1.1 types.
    // These values index both gDVs and fDVNormalizeType, so all three must stay in sync.
    protected static final short DV_ANYSIMPLETYPE = PRIMITIVE_ANYSIMPLETYPE;
    protected static final short DV_STRING = PRIMITIVE_STRING;
    protected static final short DV_BOOLEAN = PRIMITIVE_BOOLEAN;
    protected static final short DV_DECIMAL = PRIMITIVE_DECIMAL;
    protected static final short DV_FLOAT = PRIMITIVE_FLOAT;
    protected static final short DV_DOUBLE = PRIMITIVE_DOUBLE;
    protected static final short DV_DURATION = PRIMITIVE_DURATION;
    protected static final short DV_DATETIME = PRIMITIVE_DATETIME;
    protected static final short DV_TIME = PRIMITIVE_TIME;
    protected static final short DV_DATE = PRIMITIVE_DATE;
    protected static final short DV_GYEARMONTH = PRIMITIVE_GYEARMONTH;
    protected static final short DV_GYEAR = PRIMITIVE_GYEAR;
    protected static final short DV_GMONTHDAY = PRIMITIVE_GMONTHDAY;
    protected static final short DV_GDAY = PRIMITIVE_GDAY;
    protected static final short DV_GMONTH = PRIMITIVE_GMONTH;
    protected static final short DV_HEXBINARY = PRIMITIVE_HEXBINARY;
    protected static final short DV_BASE64BINARY = PRIMITIVE_BASE64BINARY;
    protected static final short DV_ANYURI = PRIMITIVE_ANYURI;
    protected static final short DV_PRECISIONDECIMAL = PRIMITIVE_PRECISIONDECIMAL;

    // Codes for non-primitive / XML Schema 1.1 datatypes.
    protected static final short DV_INTEGER = DV_PRECISIONDECIMAL + 1;
    protected static final short DV_YEARMONTHDURATION = DV_PRECISIONDECIMAL + 2;
    protected static final short DV_DAYTIMEDURATION = DV_PRECISIONDECIMAL + 3;
    protected static final short DV_ANYATOMICTYPE = DV_PRECISIONDECIMAL + 4;
    protected static final short DV_DATETIMESTAMP = DV_PRECISIONDECIMAL + 5;
    // One TypeValidator per validation DV, indexed by the DV_* codes above.
    // Order is significant: gDVs[DV_XXX] must be the validator for DV_XXX.
    private static final TypeValidator[] gDVs = {
        new AnySimpleDV(),
        new StringDV(),
        new BooleanDV(),
        new DecimalDV(),
        new FloatDV(),
        new DoubleDV(),
        new DurationDV(),
        new DateTimeDV(),
        new TimeDV(),
        new DateDV(),
        new YearMonthDV(),
        new YearDV(),
        new MonthDayDV(),
        new DayDV(),
        new MonthDV(),
        new HexBinaryDV(),
        new Base64BinaryDV(),
        new AnyURIDV(),
        new PrecisionDecimalDV(), // XML Schema 1.1 type
        new IntegerDV(),
        new YearMonthDurationDV(), // XML Schema 1.1 type
        new DayTimeDurationDV(), // XML Schema 1.1 type
        new AnyAtomicDV(), // XML Schema 1.1 type
        new DateTimeStampDV() // XML Schema 1.1 type
    };
static final short NORMALIZE_NONE = 0;
static final short NORMALIZE_TRIM = 1;
static final short NORMALIZE_FULL = 2;
static final short[] fDVNormalizeType = {
NORMALIZE_NONE, //AnySimpleDV(),
NORMALIZE_FULL, //StringDV(),
NORMALIZE_TRIM, //BooleanDV(),
NORMALIZE_TRIM, //DecimalDV(),
NORMALIZE_TRIM, //FloatDV(),
NORMALIZE_TRIM, //DoubleDV(),
NORMALIZE_TRIM, //DurationDV(),
NORMALIZE_TRIM, //DateTimeDV(),
NORMALIZE_TRIM, //TimeDV(),
NORMALIZE_TRIM, //DateDV(),
NORMALIZE_TRIM, //YearMonthDV(),
NORMALIZE_TRIM, //YearDV(),
NORMALIZE_TRIM, //MonthDayDV(),
NORMALIZE_TRIM, //DayDV(),
NORMALIZE_TRIM, //MonthDV(),
NORMALIZE_TRIM, //HexBinaryDV(),
NORMALIZE_NONE, //Base64BinaryDV(), // Base64 know how to deal with spaces
NORMALIZE_TRIM, //AnyURIDV(),
NORMALIZE_TRIM, //PrecisionDecimalDV() (Schema 1.1)
NORMALIZE_TRIM, //IntegerDV(),
NORMALIZE_TRIM, //YearMonthDurationDV() (Schema 1.1)
NORMALIZE_TRIM, //DayTimeDurationDV() (Schema 1.1)
NORMALIZE_NONE, //AnyAtomicDV() (Schema 1.1)
};
    // Built-in "special pattern" token kinds used instead of a compiled regex for
    // NMTOKEN / Name / NCName validation (see getActualValue).
    static final short SPECIAL_PATTERN_NONE = 0;
    static final short SPECIAL_PATTERN_NMTOKEN = 1;
    static final short SPECIAL_PATTERN_NAME = 2;
    static final short SPECIAL_PATTERN_NCNAME = 3;

    // Display names indexed by the SPECIAL_PATTERN_* codes (used in error messages).
    static final String[] SPECIAL_PATTERN_STRING = {
        "NONE", "NMTOKEN", "Name", "NCName"
    };

    // Display names indexed by the WS_* whiteSpace facet codes.
    static final String[] WS_FACET_STRING = {
        "preserve", "replace", "collapse"
    };

    static final String URI_SCHEMAFORSCHEMA = "http://www.w3.org/2001/XMLSchema";

    // Per-instance view of the shared validator table.
    private final TypeValidator[] fDVs = gDVs;

    // this will be true if this is a static XSSimpleTypeDecl
    // and hence must remain immutable (i.e., applyFacets
    // may not be permitted to have any effect).
    private boolean fIsImmutable = false;

    // The most specific built-in type kind.
    private short fBuiltInKind;

    private String fTypeName;
    private String fTargetNamespace;
    private XSSimpleTypeDecl fBase;       // base type definition (restriction parent)
    private short fVariety = -1;          // VARIETY_ATOMIC etc.; -1 until initialized
    private short fValidationDV = -1;     // DV_* code; indexes fDVs / fDVNormalizeType
    private short fFacetsDefined = 0;     // bit mask of FACET_* values present
    private short fFixedFacet = 0;        // bit mask of facets fixed in this or a base type

    //for constraining facets
    private short fWhiteSpace = 0;
    private int fLength = -1;
    private int fMinLength = -1;
    private int fMaxLength = -1;
    private int fTotalDigits = -1;
    private int fFractionDigits = -1;
    private Vector fPattern;              // compiled java.util.regex.Pattern objects
    private Vector fPatternStr;           // lexical pattern strings, parallel to fPattern
    private ValidatedInfo[] fEnumeration;
    private int fEnumerationSize;
    private Object fMaxInclusive;
    private Object fMaxExclusive;
    private Object fMinExclusive;
    private Object fMinInclusive;
    private short fPatternType = SPECIAL_PATTERN_NONE;

    // for fundamental facets
    private short fOrdered;
    private boolean fFinite;
    private boolean fNumeric;
    // default constructor; state is populated later (e.g. via applyFacets)
    public XSSimpleTypeDecl(){}
    //Create a new built-in primitive types (and integer/yearMonthDuration)
    // Note: anySimpleType/anyAtomicType/string preserve whitespace; all other
    // primitives collapse it and fix the whiteSpace facet.
    protected XSSimpleTypeDecl(XSSimpleTypeDecl base, String name, short validateDV,
            short ordered, boolean finite,
            boolean numeric, boolean isImmutable, short builtInKind) {
        fIsImmutable = isImmutable;
        fBase = base;
        fTypeName = name;
        fTargetNamespace = URI_SCHEMAFORSCHEMA;
        // To simplify the code for anySimpleType, we treat it as an atomic type
        fVariety = VARIETY_ATOMIC;
        fValidationDV = validateDV;
        fFacetsDefined = FACET_WHITESPACE;
        if (validateDV == DV_ANYSIMPLETYPE ||
            validateDV == DV_ANYATOMICTYPE ||
            validateDV == DV_STRING) {
            fWhiteSpace = WS_PRESERVE;
        }
        else {
            fWhiteSpace = WS_COLLAPSE;
            fFixedFacet = FACET_WHITESPACE;
        }
        this.fOrdered = ordered;
        this.fFinite = finite;
        this.fNumeric = numeric;
        // Specify the build in kind for this primitive type
        fBuiltInKind = builtInKind;
    }
    //Create a new simple type for restriction for built-in types
    // (delegates to the general restriction constructor, then overrides the kind)
    protected XSSimpleTypeDecl(XSSimpleTypeDecl base, String name, String uri, boolean isImmutable, short builtInKind) {
        this(base, name, uri, isImmutable);
        // Specify the build in kind for this built-in type
        fBuiltInKind = builtInKind;
    }
    //Create a new simple type for restriction.
    // Copies every facet from the base so the new type is usable even when
    // applyFacets is never called on it.
    protected XSSimpleTypeDecl(XSSimpleTypeDecl base, String name, String uri, boolean isImmutable) {
        fBase = base;
        fTypeName = name;
        fTargetNamespace = uri;

        fVariety = fBase.fVariety;
        fValidationDV = fBase.fValidationDV;

        // always inherit facets from the base.
        // in case a type is created, but applyFacets is not called
        fLength = fBase.fLength;
        fMinLength = fBase.fMinLength;
        fMaxLength = fBase.fMaxLength;
        fPattern = fBase.fPattern;
        fPatternStr = fBase.fPatternStr;
        fEnumeration = fBase.fEnumeration;
        fEnumerationSize = fBase.fEnumerationSize;
        fWhiteSpace = fBase.fWhiteSpace;
        fMaxExclusive = fBase.fMaxExclusive;
        fMaxInclusive = fBase.fMaxInclusive;
        fMinExclusive = fBase.fMinExclusive;
        fMinInclusive = fBase.fMinInclusive;
        fTotalDigits = fBase.fTotalDigits;
        fFractionDigits = fBase.fFractionDigits;
        fPatternType = fBase.fPatternType;
        fFixedFacet = fBase.fFixedFacet;
        fFacetsDefined = fBase.fFacetsDefined;

        //we also set fundamental facets information in case applyFacets is not called.
        calcFundamentalFacets();
        fIsImmutable = isImmutable;

        // Inherit from the base type
        fBuiltInKind = base.fBuiltInKind;
    }
    /** Returns the local name of this type definition. */
    @Override
    public String getName() {
        return fTypeName;
    }
    /** Returns the target namespace of this type definition. */
    @Override
    public String getNamespace() {
        return fTargetNamespace;
    }
    /** Returns the base type this type is derived (by restriction) from. */
    @Override
    public XSTypeDefinition getBaseType(){
        return fBase;
    }
/**
* built-in derived types by restriction
*/
void applyFacets1(XSFacets facets, short presentFacet) {
try {
applyFacets(facets, presentFacet, SPECIAL_PATTERN_NONE);
} catch (InvalidDatatypeFacetException e) {
// should never gets here, internel error
throw new RuntimeException("internal error");
}
// we've now applied facets; so lock this object:
fIsImmutable = true;
}
    /**
     * Applies facets for built-in derived types that use a special token pattern
     * (NMTOKEN / Name / NCName) rather than a compiled regex.
     *
     * <p>Only the whiteSpace facet bit is passed through; failure indicates an
     * internal error. After application the type is locked as immutable.
     *
     * @param facets      the facet values to apply
     * @param patternType one of the SPECIAL_PATTERN_* codes
     */
    void applyFacets2(XSFacets facets, short patternType) {
        try {
            applyFacets(facets, XSSimpleTypeDefinition.FACET_WHITESPACE, patternType);
        } catch (InvalidDatatypeFacetException e) {
            // should never gets here, internal error
            throw new RuntimeException("internal error", e);
        }
        // we've now applied facets; so lock this object:
        fIsImmutable = true;
    }
    /**
     * If &lt;restriction&gt; is chosen, or built-in derived types by restriction.
     *
     * <p>Parses the facets present in {@code presentFacet}, checks each against the
     * corresponding (possibly fixed) facet on the base type, cross-checks the range
     * facets against each other, inherits unspecified facets from the base, and
     * finally recomputes the fundamental facets. No-op when this type is immutable.
     *
     * @param facets       facet values to apply
     * @param presentFacet bit mask of facets present in {@code facets}
     * @param patternType  SPECIAL_PATTERN_* token kind, or SPECIAL_PATTERN_NONE
     * @throws InvalidDatatypeFacetException if a facet value is invalid
     */
    void applyFacets(XSFacets facets, short presentFacet, short patternType)
    throws InvalidDatatypeFacetException {

        // if the object is immutable, should not apply facets...
        if(fIsImmutable) return;
        ValidatedInfo tempInfo = new ValidatedInfo();

        // clear facets. because we always inherit facets in the constructor
        // REVISIT: in fact, we don't need to clear them.
        // we can convert 5 string values (4 bounds + 1 enum) to actual values,
        // store them somewhere, then do facet checking at once, instead of
        // going through the following steps. (lots of checking are redundant:
        // for example, ((presentFacet & FACET_XXX) != 0))

        fFacetsDefined = 0;
        fFixedFacet = 0;

        int result = 0 ;

        // step 1: parse present facets
        short allowedFacet = fDVs[fValidationDV].getAllowedFacets();

        // pattern
        if ((presentFacet & FACET_PATTERN) != 0) {
            if ((allowedFacet & FACET_PATTERN) == 0) {
                reportError("cos-applicable-facets", new Object[]{"pattern", fTypeName});
            } else {
                Pattern regex = null;
                try {
                    regex = Pattern.compile(facets.pattern);
                } catch (Exception e) {
                    reportError("InvalidRegex", new Object[]{facets.pattern, e.getLocalizedMessage()});
                }
                if (regex != null) {
                    fPattern = new Vector();
                    fPattern.addElement(regex);
                    fPatternStr = new Vector();
                    fPatternStr.addElement(facets.pattern);
                    fFacetsDefined |= FACET_PATTERN;
                }
            }
        }

        // whiteSpace
        if ((presentFacet & FACET_WHITESPACE) != 0) {
            if ((allowedFacet & FACET_WHITESPACE) == 0) {
                reportError("cos-applicable-facets", new Object[]{"whiteSpace", fTypeName});
            } else {
                fWhiteSpace = facets.whiteSpace;
                fFacetsDefined |= FACET_WHITESPACE;
            }
        }

        // maxInclusive
        if ((presentFacet & FACET_MAXINCLUSIVE) != 0) {
            if ((allowedFacet & FACET_MAXINCLUSIVE) == 0) {
                reportError("cos-applicable-facets", new Object[]{"maxInclusive", fTypeName});
            } else {
                try {
                    fMaxInclusive = fBase.getActualValue(facets.maxInclusive, tempInfo);
                    fFacetsDefined |= FACET_MAXINCLUSIVE;
                } catch (InvalidDatatypeValueException ide) {
                    reportError(ide.getKey(), ide.getArgs());
                    reportError("FacetValueFromBase", new Object[]{fTypeName, facets.maxInclusive,
                            "maxInclusive", fBase.getName()});
                }

                // check against fixed value in base
                if (((fBase.fFacetsDefined & FACET_MAXINCLUSIVE) != 0)) {
                    if ((fBase.fFixedFacet & FACET_MAXINCLUSIVE) != 0) {
                        if (fDVs[fValidationDV].compare(fMaxInclusive, fBase.fMaxInclusive) != 0)
                            reportError( "FixedFacetValue", new Object[]{"maxInclusive", fMaxInclusive, fBase.fMaxInclusive, fTypeName});
                    }
                }
                // maxInclusive from base
                try {
                    fBase.validate(tempInfo);
                } catch (InvalidDatatypeValueException ide) {
                    reportError(ide.getKey(), ide.getArgs());
                    reportError("FacetValueFromBase", new Object[]{fTypeName, facets.maxInclusive,
                            "maxInclusive", fBase.getName()});
                }
            }
        }

        // maxExclusive
        // needCheckBase tracks whether the parsed bound must also be validated
        // against the base type (skipped when it equals the base's own bound).
        boolean needCheckBase = true;
        if ((presentFacet & FACET_MAXEXCLUSIVE) != 0) {
            if ((allowedFacet & FACET_MAXEXCLUSIVE) == 0) {
                reportError("cos-applicable-facets", new Object[]{"maxExclusive", fTypeName});
            } else {
                try {
                    fMaxExclusive = fBase.getActualValue(facets.maxExclusive, tempInfo);
                    fFacetsDefined |= FACET_MAXEXCLUSIVE;
                } catch (InvalidDatatypeValueException ide) {
                    reportError(ide.getKey(), ide.getArgs());
                    reportError("FacetValueFromBase", new Object[]{fTypeName, facets.maxExclusive,
                            "maxExclusive", fBase.getName()});
                }

                // check against fixed value in base
                if (((fBase.fFacetsDefined & FACET_MAXEXCLUSIVE) != 0)) {
                    result = fDVs[fValidationDV].compare(fMaxExclusive, fBase.fMaxExclusive);
                    if ((fBase.fFixedFacet & FACET_MAXEXCLUSIVE) != 0 && result != 0) {
                        reportError( "FixedFacetValue", new Object[]{"maxExclusive", facets.maxExclusive, fBase.fMaxExclusive, fTypeName});
                    }
                    if (result == 0) {
                        needCheckBase = false;
                    }
                }
                // maxExclusive from base
                if (needCheckBase) {
                    try {
                        fBase.validate(tempInfo);
                    } catch (InvalidDatatypeValueException ide) {
                        reportError(ide.getKey(), ide.getArgs());
                        reportError("FacetValueFromBase", new Object[]{fTypeName, facets.maxExclusive,
                                "maxExclusive", fBase.getName()});
                    }
                }
                // If maxExclusive == base.maxExclusive, then we only need to check
                // maxExclusive <= base.maxInclusive
                else if (((fBase.fFacetsDefined & FACET_MAXINCLUSIVE) != 0)) {
                    if (fDVs[fValidationDV].compare(fMaxExclusive, fBase.fMaxInclusive) > 0) {
                        reportError( "maxExclusive-valid-restriction.2", new Object[]{facets.maxExclusive, fBase.fMaxInclusive});
                    }
                }
            }
        }
        // minExclusive
        needCheckBase = true;
        if ((presentFacet & FACET_MINEXCLUSIVE) != 0) {
            if ((allowedFacet & FACET_MINEXCLUSIVE) == 0) {
                reportError("cos-applicable-facets", new Object[]{"minExclusive", fTypeName});
            } else {
                try {
                    fMinExclusive = fBase.getActualValue(facets.minExclusive, tempInfo);
                    fFacetsDefined |= FACET_MINEXCLUSIVE;
                } catch (InvalidDatatypeValueException ide) {
                    reportError(ide.getKey(), ide.getArgs());
                    reportError("FacetValueFromBase", new Object[]{fTypeName, facets.minExclusive,
                            "minExclusive", fBase.getName()});
                }

                // check against fixed value in base
                if (((fBase.fFacetsDefined & FACET_MINEXCLUSIVE) != 0)) {
                    result = fDVs[fValidationDV].compare(fMinExclusive, fBase.fMinExclusive);
                    if ((fBase.fFixedFacet & FACET_MINEXCLUSIVE) != 0 && result != 0) {
                        reportError( "FixedFacetValue", new Object[]{"minExclusive", facets.minExclusive, fBase.fMinExclusive, fTypeName});
                    }
                    if (result == 0) {
                        needCheckBase = false;
                    }
                }
                // minExclusive from base
                if (needCheckBase) {
                    try {
                        fBase.validate(tempInfo);
                    } catch (InvalidDatatypeValueException ide) {
                        reportError(ide.getKey(), ide.getArgs());
                        reportError("FacetValueFromBase", new Object[]{fTypeName, facets.minExclusive,
                                "minExclusive", fBase.getName()});
                    }
                }
                // If minExclusive == base.minExclusive, then we only need to check
                // minExclusive >= base.minInclusive
                else if (((fBase.fFacetsDefined & FACET_MININCLUSIVE) != 0)) {
                    if (fDVs[fValidationDV].compare(fMinExclusive, fBase.fMinInclusive) < 0) {
                        reportError( "minExclusive-valid-restriction.3", new Object[]{facets.minExclusive, fBase.fMinInclusive});
                    }
                }
            }
        }
        // minInclusive
        if ((presentFacet & FACET_MININCLUSIVE) != 0) {
            if ((allowedFacet & FACET_MININCLUSIVE) == 0) {
                reportError("cos-applicable-facets", new Object[]{"minInclusive", fTypeName});
            } else {
                try {
                    fMinInclusive = fBase.getActualValue(facets.minInclusive, tempInfo);
                    fFacetsDefined |= FACET_MININCLUSIVE;
                } catch (InvalidDatatypeValueException ide) {
                    reportError(ide.getKey(), ide.getArgs());
                    reportError("FacetValueFromBase", new Object[]{fTypeName, facets.minInclusive,
                            "minInclusive", fBase.getName()});
                }

                // check against fixed value in base
                if (((fBase.fFacetsDefined & FACET_MININCLUSIVE) != 0)) {
                    if ((fBase.fFixedFacet & FACET_MININCLUSIVE) != 0) {
                        if (fDVs[fValidationDV].compare(fMinInclusive, fBase.fMinInclusive) != 0)
                            reportError( "FixedFacetValue", new Object[]{"minInclusive", facets.minInclusive, fBase.fMinInclusive, fTypeName});
                    }
                }
                // minInclusive from base
                try {
                    fBase.validate(tempInfo);
                } catch (InvalidDatatypeValueException ide) {
                    reportError(ide.getKey(), ide.getArgs());
                    reportError("FacetValueFromBase", new Object[]{fTypeName, facets.minInclusive,
                            "minInclusive", fBase.getName()});
                }
            }
        }

        // token type: internal use, so do less checking
        if (patternType != SPECIAL_PATTERN_NONE) {
            fPatternType = patternType;
        }

        // step 2: check facets against each other: length, bounds
        if(fFacetsDefined != 0) {

            // check 4.3.8.c1 error: maxInclusive + maxExclusive
            if (((fFacetsDefined & FACET_MAXEXCLUSIVE) != 0) && ((fFacetsDefined & FACET_MAXINCLUSIVE) != 0)) {
                reportError( "maxInclusive-maxExclusive", new Object[]{fMaxInclusive, fMaxExclusive, fTypeName});
            }

            // check 4.3.9.c1 error: minInclusive + minExclusive
            if (((fFacetsDefined & FACET_MINEXCLUSIVE) != 0) && ((fFacetsDefined & FACET_MININCLUSIVE) != 0)) {
                reportError("minInclusive-minExclusive", new Object[]{fMinInclusive, fMinExclusive, fTypeName});
            }

            // check 4.3.7.c1 must: minInclusive <= maxInclusive
            if (((fFacetsDefined & FACET_MAXINCLUSIVE) != 0) && ((fFacetsDefined & FACET_MININCLUSIVE) != 0)) {
                result = fDVs[fValidationDV].compare(fMinInclusive, fMaxInclusive);
                if (result != -1 && result != 0)
                    reportError("minInclusive-less-than-equal-to-maxInclusive", new Object[]{fMinInclusive, fMaxInclusive, fTypeName});
            }

            // check 4.3.8.c2 must: minExclusive <= maxExclusive ??? minExclusive < maxExclusive
            if (((fFacetsDefined & FACET_MAXEXCLUSIVE) != 0) && ((fFacetsDefined & FACET_MINEXCLUSIVE) != 0)) {
                result = fDVs[fValidationDV].compare(fMinExclusive, fMaxExclusive);
                if (result != -1 && result != 0)
                    reportError( "minExclusive-less-than-equal-to-maxExclusive", new Object[]{fMinExclusive, fMaxExclusive, fTypeName});
            }

            // check 4.3.9.c2 must: minExclusive < maxInclusive
            if (((fFacetsDefined & FACET_MAXINCLUSIVE) != 0) && ((fFacetsDefined & FACET_MINEXCLUSIVE) != 0)) {
                if (fDVs[fValidationDV].compare(fMinExclusive, fMaxInclusive) != -1)
                    reportError( "minExclusive-less-than-maxInclusive", new Object[]{fMinExclusive, fMaxInclusive, fTypeName});
            }

            // check 4.3.10.c1 must: minInclusive < maxExclusive
            if (((fFacetsDefined & FACET_MAXEXCLUSIVE) != 0) && ((fFacetsDefined & FACET_MININCLUSIVE) != 0)) {
                if (fDVs[fValidationDV].compare(fMinInclusive, fMaxExclusive) != -1)
                    reportError( "minInclusive-less-than-maxExclusive", new Object[]{fMinInclusive, fMaxExclusive, fTypeName});
            }

            // check 4.3.6.c1 error:
            // (whiteSpace = preserve || whiteSpace = replace) && fBase.whiteSpace = collapese or
            // whiteSpace = preserve && fBase.whiteSpace = replace
            if ( (fFacetsDefined & FACET_WHITESPACE) != 0 && (fBase.fFacetsDefined & FACET_WHITESPACE) != 0 ){
                if ( (fBase.fFixedFacet & FACET_WHITESPACE) != 0 && fWhiteSpace != fBase.fWhiteSpace ) {
                    reportError( "FixedFacetValue", new Object[]{"whiteSpace", whiteSpaceValue(fWhiteSpace), whiteSpaceValue(fBase.fWhiteSpace), fTypeName});
                }

                if ( fWhiteSpace == WS_PRESERVE && fBase.fWhiteSpace == WS_COLLAPSE ){
                    reportError( "whiteSpace-valid-restriction.1", new Object[]{fTypeName, "preserve"});
                }
                if ( fWhiteSpace == WS_REPLACE && fBase.fWhiteSpace == WS_COLLAPSE ){
                    reportError( "whiteSpace-valid-restriction.1", new Object[]{fTypeName, "replace"});
                }
                if ( fWhiteSpace == WS_PRESERVE && fBase.fWhiteSpace == WS_REPLACE ){
                    reportError( "whiteSpace-valid-restriction.2", new Object[]{fTypeName});
                }
            }
        }//fFacetsDefined != null

        // step 4: inherit other facets from base (including fTokeyType)

        // inherit pattern
        if ( (fBase.fFacetsDefined & FACET_PATTERN) != 0 ) {
            if ((fFacetsDefined & FACET_PATTERN) == 0) {
                fFacetsDefined |= FACET_PATTERN;
                fPattern = fBase.fPattern;
                fPatternStr = fBase.fPatternStr;
            }
            else {
                for (int i = fBase.fPattern.size()-1; i >= 0; --i) {
                    fPattern.addElement(fBase.fPattern.elementAt(i));
                    fPatternStr.addElement(fBase.fPatternStr.elementAt(i));
                }
            }
        }
        // inherit whiteSpace
        if ( (fFacetsDefined & FACET_WHITESPACE) == 0 && (fBase.fFacetsDefined & FACET_WHITESPACE) != 0 ) {
            fFacetsDefined |= FACET_WHITESPACE;
            fWhiteSpace = fBase.fWhiteSpace;
        }
        // inherit maxExclusive
        if ((( fBase.fFacetsDefined & FACET_MAXEXCLUSIVE) != 0) &&
                !((fFacetsDefined & FACET_MAXEXCLUSIVE) != 0) && !((fFacetsDefined & FACET_MAXINCLUSIVE) != 0)) {
            fFacetsDefined |= FACET_MAXEXCLUSIVE;
            fMaxExclusive = fBase.fMaxExclusive;
        }
        // inherit maxInclusive
        if ((( fBase.fFacetsDefined & FACET_MAXINCLUSIVE) != 0) &&
                !((fFacetsDefined & FACET_MAXEXCLUSIVE) != 0) && !((fFacetsDefined & FACET_MAXINCLUSIVE) != 0)) {
            fFacetsDefined |= FACET_MAXINCLUSIVE;
            fMaxInclusive = fBase.fMaxInclusive;
        }
        // inherit minExclusive
        if ((( fBase.fFacetsDefined & FACET_MINEXCLUSIVE) != 0) &&
                !((fFacetsDefined & FACET_MINEXCLUSIVE) != 0) && !((fFacetsDefined & FACET_MININCLUSIVE) != 0)) {
            fFacetsDefined |= FACET_MINEXCLUSIVE;
            fMinExclusive = fBase.fMinExclusive;
        }
        // inherit minExclusive
        if ((( fBase.fFacetsDefined & FACET_MININCLUSIVE) != 0) &&
                !((fFacetsDefined & FACET_MINEXCLUSIVE) != 0) && !((fFacetsDefined & FACET_MININCLUSIVE) != 0)) {
            fFacetsDefined |= FACET_MININCLUSIVE;
            fMinInclusive = fBase.fMinInclusive;
        }

        //inherit tokeytype
        if ((fPatternType == SPECIAL_PATTERN_NONE ) && (fBase.fPatternType != SPECIAL_PATTERN_NONE)) {
            fPatternType = fBase.fPatternType ;
        }

        // step 5: mark fixed values
        fFixedFacet |= fBase.fFixedFacet;

        //step 6: setting fundamental facets
        calcFundamentalFacets();

    } //applyFacets()
    /**
     * Validates a lexical value against this simple type and returns the compiled
     * (actual) value.
     *
     * @param content       the lexical value to validate
     * @param validatedInfo receives the normalized and actual values; a fresh
     *                      instance is created when {@code null}
     * @return the actual (compiled) value
     * @throws InvalidDatatypeValueException if the value is invalid for this type
     */
    @Override
    public Object validate(String content, ValidatedInfo validatedInfo) throws InvalidDatatypeValueException {

        if (validatedInfo == null)
            validatedInfo = new ValidatedInfo();

        // first normalize string value, and convert it to actual value
        Object ob = getActualValue(content, validatedInfo);

        validate(validatedInfo);

        return ob;

    }
    /**
     * validate an actual value against this DV
     *
     * <p>Facet checking is skipped when no facets are defined, or when only the
     * whiteSpace facet is defined (whiteSpace has already been applied during
     * normalization).
     *
     * @param validatedInfo used to provide the actual value and member types
     * @throws InvalidDatatypeValueException if a constraining facet is violated
     */
    @Override
    public void validate(ValidatedInfo validatedInfo)
    throws InvalidDatatypeValueException {

        // then validate the actual value against the facets
        if (fFacetsDefined != 0 && fFacetsDefined != FACET_WHITESPACE) {
            checkFacets(validatedInfo);
        }

    }
private void checkFacets(ValidatedInfo validatedInfo) throws InvalidDatatypeValueException {
Object ob = validatedInfo.actualValue;
String content = validatedInfo.normalizedValue;
int compare;
//maxinclusive
if ( (fFacetsDefined & FACET_MAXINCLUSIVE) != 0 ) {
compare = fDVs[fValidationDV].compare(ob, fMaxInclusive);
if (compare != -1 && compare != 0) {
throw new InvalidDatatypeValueException("cvc-maxInclusive-valid",
new Object[] {content, fMaxInclusive, fTypeName});
}
}
//maxExclusive
if ( (fFacetsDefined & FACET_MAXEXCLUSIVE) != 0 ) {
compare = fDVs[fValidationDV].compare(ob, fMaxExclusive );
if (compare != -1) {
throw new InvalidDatatypeValueException("cvc-maxExclusive-valid",
new Object[] {content, fMaxExclusive, fTypeName});
}
}
//minInclusive
if ( (fFacetsDefined & FACET_MININCLUSIVE) != 0 ) {
compare = fDVs[fValidationDV].compare(ob, fMinInclusive);
if (compare != 1 && compare != 0) {
throw new InvalidDatatypeValueException("cvc-minInclusive-valid",
new Object[] {content, fMinInclusive, fTypeName});
}
}
//minExclusive
if ( (fFacetsDefined & FACET_MINEXCLUSIVE) != 0 ) {
compare = fDVs[fValidationDV].compare(ob, fMinExclusive);
if (compare != 1) {
throw new InvalidDatatypeValueException("cvc-minExclusive-valid",
new Object[] {content, fMinExclusive, fTypeName});
}
}
}
    //we can still return object for internal use.
    /**
     * Normalizes a lexical value, checks it against pattern facets (regex or the
     * special NMTOKEN/Name/NCName token kinds), and converts it to the actual value.
     * Fills in {@code validatedInfo}'s normalized value, actual value and type kind.
     *
     * @throws InvalidDatatypeValueException if a pattern or token check fails, or the
     *         underlying DV rejects the value
     */
    private Object getActualValue(Object content, ValidatedInfo validatedInfo)
    throws InvalidDatatypeValueException{

        String nvalue;
        nvalue = normalize(content, fWhiteSpace);

        // Every stored regex pattern must match the normalized value.
        if ( (fFacetsDefined & FACET_PATTERN ) != 0 ) {
            Pattern regex;
            for (int idx = fPattern.size()-1; idx >= 0; idx--) {
                regex = (Pattern)fPattern.elementAt(idx);
                if (!regex.matcher(nvalue).matches()) {
                    throw new InvalidDatatypeValueException("cvc-pattern-valid",
                            new Object[]{content,
                            fPatternStr.elementAt(idx),
                            fTypeName});
                }
            }
        }

        // validate special kinds of token, in place of old pattern matching
        if (fPatternType != SPECIAL_PATTERN_NONE) {

            boolean seenErr = false;
            if (fPatternType == SPECIAL_PATTERN_NMTOKEN) {
                // PATTERN "\\c+"
                seenErr = !XercesXMLChar.isValidNmtoken(nvalue);
            }
            else if (fPatternType == SPECIAL_PATTERN_NAME) {
                // PATTERN "\\i\\c*"
                seenErr = !XercesXMLChar.isValidName(nvalue);
            }
            else if (fPatternType == SPECIAL_PATTERN_NCNAME) {
                // PATTERN "[\\i-[:]][\\c-[:]]*"
                seenErr = !XercesXMLChar.isValidNCName(nvalue);
            }
            if (seenErr) {
                throw new InvalidDatatypeValueException("cvc-datatype-valid.1.2.1",
                        new Object[]{nvalue, SPECIAL_PATTERN_STRING[fPatternType]});
            }
        }

        validatedInfo.normalizedValue = nvalue;
        Object avalue = fDVs[fValidationDV].getActualValue(nvalue);
        validatedInfo.actualValue = avalue;
        validatedInfo.actualValueType = fBuiltInKind;

        return avalue;

    }//getActualValue()
/**
 * Equality check used for value comparison. A {@code null} first operand is
 * never considered equal to anything (including another {@code null}).
 */
@Override
public boolean isEqual(Object value1, Object value2) {
    return value1 != null && value1.equals(value2);
}//isEqual()
// normalize the string according to the whiteSpace facet
/**
 * Applies the whiteSpace facet to {@code content}: WS_PRESERVE returns the
 * input unchanged, WS_REPLACE maps each #x9/#xA/#xD to a single space, and
 * collapse additionally squeezes whitespace runs and strips leading/trailing
 * whitespace. A null or empty input is returned as-is.
 */
public static String normalize(String content, short ws) {
    int length = content == null ? 0 : content.length();
    if (length == 0 || ws == WS_PRESERVE) {
        return content;
    }
    StringBuilder result = new StringBuilder();
    if (ws == WS_REPLACE) {
        // replace: each tab/LF/CR becomes one space, everything else copied
        for (int pos = 0; pos < length; pos++) {
            char c = content.charAt(pos);
            result.append((c == 0x9 || c == 0xa || c == 0xd) ? (char)0x20 : c);
        }
    } else {
        // collapse
        boolean leading = true;
        for (int pos = 0; pos < length; pos++) {
            char c = content.charAt(pos);
            if (c == 0x9 || c == 0xa || c == 0xd || c == 0x20) {
                // skip the rest of this whitespace run
                while (pos < length - 1) {
                    char next = content.charAt(pos + 1);
                    if (next != 0x9 && next != 0xa && next != 0xd && next != 0x20) {
                        break;
                    }
                    pos++;
                }
                // emit one space unless the run is leading or trailing
                if (pos < length - 1 && !leading) {
                    result.append((char)0x20);
                }
            } else {
                result.append(c);
                leading = false;
            }
        }
    }
    return result.toString();
}
// normalize the string according to the whiteSpace facet
/**
 * Whitespace-normalizes {@code content}. When no pattern facet is defined,
 * some validation DVs allow skipping normalization entirely (NORMALIZE_NONE)
 * or reducing it to a trim (NORMALIZE_TRIM). Otherwise the whiteSpace facet
 * is applied; a {@code StringBuilder} argument is normalized IN PLACE (its
 * length may shrink under collapse). Returns null for null input.
 */
protected String normalize(Object content, short ws) {
    if (content == null)
        return null;
    // If pattern is not defined, we can skip some of the normalization.
    // Otherwise we have to normalize the data for correct result of
    // pattern validation.
    if ( (fFacetsDefined & FACET_PATTERN ) == 0 ) {
        short norm_type = fDVNormalizeType[fValidationDV];
        if (norm_type == NORMALIZE_NONE) {
            return content.toString();
        }
        else if (norm_type == NORMALIZE_TRIM) {
            return XercesXMLChar.trim(content.toString());
        }
    }
    // non-StringBuilder input goes through the immutable String path
    if (!(content instanceof StringBuilder)) {
        String strContent = content.toString();
        return normalize(strContent, ws);
    }
    StringBuilder sb = (StringBuilder)content;
    int len = sb.length();
    if (len == 0)
        return "";
    if (ws == WS_PRESERVE)
        return sb.toString();
    if (ws == WS_REPLACE) {
        char ch;
        // when it's replace, just replace #x9, #xa, #xd by #x20
        for (int i = 0; i < len; i++) {
            ch = sb.charAt(i);
            if (ch == 0x9 || ch == 0xa || ch == 0xd)
                sb.setCharAt(i, (char)0x20);
        }
    } else {
        char ch;
        // i reads ahead, j is the in-place write cursor (j <= i always)
        int i, j = 0;
        boolean isLeading = true;
        // when it's collapse
        for (i = 0; i < len; i++) {
            ch = sb.charAt(i);
            // append real characters, so we passed leading ws
            if (ch != 0x9 && ch != 0xa && ch != 0xd && ch != 0x20) {
                sb.setCharAt(j++, ch);
                isLeading = false;
            }
            else {
                // for whitespaces, we skip all following ws
                for (; i < len-1; i++) {
                    ch = sb.charAt(i+1);
                    if (ch != 0x9 && ch != 0xa && ch != 0xd && ch != 0x20)
                        break;
                }
                // if it's not a leading or tailing ws, then append a space
                if (i < len - 1 && !isLeading)
                    sb.setCharAt(j++, (char)0x20);
            }
        }
        // truncate to the number of characters actually written
        sb.setLength(j);
    }
    return sb.toString();
}
/**
 * Raises a facet error identified by message {@code key} with the given
 * message arguments; never returns normally.
 */
void reportError(String key, Object[] args) throws InvalidDatatypeFacetException {
    throw new InvalidDatatypeFacetException(key, args);
}
/** Maps a whiteSpace facet constant to its lexical name via the lookup table. */
private String whiteSpaceValue(short ws){
    return WS_FACET_STRING[ws];
}
/** Derives the three fundamental facets (ordered, numeric, cardinality). */
private void calcFundamentalFacets() {
    setOrdered();
    setNumeric();
    setCardinality();
}
/** {ordered} is inherited from the base type definition. */
private void setOrdered(){
    // When {variety} is atomic, {value} is inherited from {value} of {base type definition}. For all "primitive" types {value} is as specified in the table in Fundamental Facets (C.1).
    this.fOrdered = fBase.fOrdered;
}//setOrdered
/** {numeric} is inherited from the base type definition. */
private void setNumeric(){
    this.fNumeric = fBase.fNumeric;
}//setNumeric
/**
 * Computes the {cardinality} fundamental facet. In the original code the
 * inner bounded-facet test assigned {@code false} on BOTH branches, so the
 * whole else-arm was dead logic and the method reduces to inheriting the
 * base type's finiteness — simplified here with identical behavior.
 * NOTE(review): upstream Xerces additionally makes a bounded type finite
 * when totalDigits/fractionDigits or a length facet is present; confirm
 * whether that logic was intentionally dropped in this copy.
 */
private void setCardinality(){
    this.fFinite = fBase.fFinite;
}//setCardinality
static final XSSimpleTypeDecl fAnySimpleType = new XSSimpleTypeDecl(null, "anySimpleType", DV_ANYSIMPLETYPE, ORDERED_FALSE, true, false, true, XSConstants.ANYSIMPLETYPE_DT);
/**
 * Returns the type identity as "{targetNamespace},{typeName}".
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    StringBuilder buf = new StringBuilder();
    buf.append(this.fTargetNamespace).append(',').append(this.fTypeName);
    return buf.toString();
}
} // class XSSimpleTypeDecl
|
apache/oozie | 37,350 | core/src/test/java/org/apache/oozie/servlet/TestV2ValidateServlet.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oozie.servlet;
import org.apache.hadoop.fs.Path;
import org.apache.oozie.client.rest.JsonTags;
import org.apache.oozie.client.rest.RestConstants;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
public class TestV2ValidateServlet extends DagServletTestCase {
// Touch the servlet class so its static initialization runs before any test.
static {
    new V2ValidateServlet();
}
// All tests here run the servlet container without security enabled.
private static final boolean IS_SECURITY_ENABLED = false;
/** Standard per-test setup; delegates entirely to the harness. */
@Override
protected void setUp() throws Exception {
    super.setUp();
}
/**
 * POSTs a schema-valid shell workflow (workflow:0.3) to /v2/validate and
 * expects HTTP 200 with the JSON message "Valid workflow-app".
 */
public void testValidateWF() throws Exception {
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", "workflow.xml");
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n" +
                    "<workflow-app xmlns=\"uri:oozie:workflow:0.3\" name=\"test\">\n" +
                    "    <start to=\"shell-1\"/>\n" +
                    "    <action name=\"shell-1\">\n" +
                    "        <shell xmlns=\"uri:oozie:shell-action:0.3\">\n" +
                    "            <job-tracker>${jobTracker}</job-tracker>\n" +
                    "            <name-node>${nameNode}</name-node>\n" +
                    "            <exec>script-outstream.sh</exec>\n" +
                    "            <argument></argument>\n" +
                    "            <file>script-outstream.sh</file>\n" +
                    "            <capture-output/>\n" +
                    "        </shell>\n" +
                    "        <ok to=\"end\"/>\n" +
                    "        <error to=\"fail\"/>\n" +
                    "    </action>\n" +
                    "    <kill name=\"fail\">\n" +
                    "        <message>failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>\n" +
                    "    </kill>\n" +
                    "    <end name=\"end\"/>\n" +
                    "</workflow-app>";
            writeXML(conn.getOutputStream(), xml);
            assertEquals(HttpServletResponse.SC_OK, conn.getResponseCode());
            JSONObject obj = (JSONObject) JSONValue.parse(new InputStreamReader(conn.getInputStream(),
                    StandardCharsets.UTF_8));
            assertEquals("Valid workflow-app", obj.get(JsonTags.VALIDATE));
            return null;
        }
    });
}
/**
 * Writes a schema-valid workflow to HDFS and validates it via /v2/validate
 * with the "file" parameter pointing at the HDFS path; expects HTTP 200 and
 * the JSON message "Valid workflow-app".
 * Fixes: the writer was never closed when write() threw (resource leak) —
 * now try-with-resources; Writer.write(String) replaces the needless
 * toCharArray() copy; close() implies flush().
 */
public void testValidateWFonHDFS() throws Exception {
    String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n" +
            "<workflow-app xmlns=\"uri:oozie:workflow:0.3\" name=\"test\">\n" +
            "    <start to=\"shell-1\"/>\n" +
            "    <action name=\"shell-1\">\n" +
            "        <shell xmlns=\"uri:oozie:shell-action:0.3\">\n" +
            "            <job-tracker>${jobTracker}</job-tracker>\n" +
            "            <name-node>${nameNode}</name-node>\n" +
            "            <exec>script-outstream.sh</exec>\n" +
            "            <argument></argument>\n" +
            "            <file>script-outstream.sh</file>\n" +
            "            <capture-output/>\n" +
            "        </shell>\n" +
            "        <ok to=\"end\"/>\n" +
            "        <error to=\"fail\"/>\n" +
            "    </action>\n" +
            "    <kill name=\"fail\">\n" +
            "        <message>failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>\n" +
            "    </kill>\n" +
            "    <end name=\"end\"/>\n" +
            "</workflow-app>";
    final Path path = new Path(getFsTestCaseDir(), "workflow.xml");
    try (OutputStreamWriter writer = new OutputStreamWriter(getFileSystem().create(path),
            StandardCharsets.UTF_8)) {
        writer.write(xml);
    }
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", path.toString());
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            assertEquals(HttpServletResponse.SC_OK, conn.getResponseCode());
            JSONObject obj = (JSONObject) JSONValue.parse(new InputStreamReader(conn.getInputStream(),
                    StandardCharsets.UTF_8));
            assertEquals("Valid workflow-app", obj.get(JsonTags.VALIDATE));
            return null;
        }
    });
}
/**
 * Workflow with an undeclared element (name-node2): expects HTTP 400,
 * Oozie error code E0701, and a message naming the offending element.
 */
public void testValidateWFNegative() throws Exception {
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", "workflow.xml");
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            String xml = "<workflow-app xmlns=\"uri:oozie:workflow:0.3\" name=\"test\">\n" +
                    "    <start to=\"shell-1\"/>\n" +
                    "    <action name=\"shell-1\">\n" +
                    "        <shell xmlns=\"uri:oozie:shell-action:0.3\">\n" +
                    "            <name-node2>${nameNode}</name-node2>\n" +
                    "            <exec>script-outstream.sh</exec>\n" +
                    "            <argument></argument>\n" +
                    "            <file>script-outstream.sh</file>\n" +
                    "            <capture-output/>\n" +
                    "        </shell>\n" +
                    "        <ok to=\"end\"/>\n" +
                    "        <error to=\"fail\"/>\n" +
                    "    </action>\n" +
                    "    <kill name=\"fail\">\n" +
                    "        <message>failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>\n" +
                    "    </kill>\n" +
                    "    <end name=\"end\"/>\n" +
                    "</workflow-app>";
            writeXML(conn.getOutputStream(), xml);
            assertEquals(HttpServletResponse.SC_BAD_REQUEST, conn.getResponseCode());
            String error = conn.getHeaderField(RestConstants.OOZIE_ERROR_CODE);
            String message = conn.getHeaderField(RestConstants.OOZIE_ERROR_MESSAGE);
            assertEquals("E0701", error);
            assertEquals(true, message.contains("Invalid content was found starting with element 'name-node2'"));
            return null;
        }
    });
}
/**
 * Workflow with an undeclared node type (kill-invalid): expects HTTP 400,
 * error code E0701, and a message naming the offending element.
 */
public void testValidateWFNegative2() throws Exception {
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", "workflow.xml");
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            String xml = "<workflow-app xmlns=\"uri:oozie:workflow:0.3\" name=\"test\">\n" +
                    "    <start to=\"shell-1\"/>\n" +
                    "    <action name=\"shell-1\">\n" +
                    "        <shell xmlns=\"uri:oozie:shell-action:0.3\">\n" +
                    "            <name-node>${nameNode}</name-node>\n" +
                    "            <exec>script-outstream.sh</exec>\n" +
                    "            <argument></argument>\n" +
                    "            <file>script-outstream.sh</file>\n" +
                    "            <capture-output/>\n" +
                    "        </shell>\n" +
                    "        <ok to=\"end\"/>\n" +
                    "        <error to=\"fail\"/>\n" +
                    "    </action>\n" +
                    "    <kill-invalid name=\"fail\">\n" +
                    "        <message>failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>\n" +
                    "    </kill-invalid>\n" +
                    "    <end name=\"end\"/>\n" +
                    "</workflow-app>";
            writeXML(conn.getOutputStream(), xml);
            assertEquals(HttpServletResponse.SC_BAD_REQUEST, conn.getResponseCode());
            String error = conn.getHeaderField(RestConstants.OOZIE_ERROR_CODE);
            String message = conn.getHeaderField(RestConstants.OOZIE_ERROR_MESSAGE);
            assertEquals("E0701", error);
            assertEquals(true, message.contains("Invalid content was found starting with element 'kill-invalid'"));
            return null;
        }
    });
}
/**
 * Undeclared root element (workflow-app-invalid): expects HTTP 400,
 * error code E0701, and a "Cannot find the declaration" message.
 */
public void testValidateWFNegative3() throws Exception {
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", "workflow.xml");
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            String xml = "<workflow-app-invalid xmlns=\"uri:oozie:workflow:0.3\" name=\"test\">\n" +
                    "    <start to=\"shell-1\"/>\n" +
                    "    <action name=\"shell-1\">\n" +
                    "        <shell xmlns=\"uri:oozie:shell-action:0.3\">\n" +
                    "            <name-node>${nameNode}</name-node>\n" +
                    "            <exec>script-outstream.sh</exec>\n" +
                    "            <argument></argument>\n" +
                    "            <file>script-outstream.sh</file>\n" +
                    "            <capture-output/>\n" +
                    "        </shell>\n" +
                    "        <ok to=\"end\"/>\n" +
                    "        <error to=\"fail\"/>\n" +
                    "    </action>\n" +
                    "    <kill name=\"fail\">\n" +
                    "        <message>failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>\n" +
                    "    </kill>\n" +
                    "    <end name=\"end\"/>\n" +
                    "</workflow-app-invalid>";
            writeXML(conn.getOutputStream(), xml);
            assertEquals(HttpServletResponse.SC_BAD_REQUEST, conn.getResponseCode());
            String error = conn.getHeaderField(RestConstants.OOZIE_ERROR_CODE);
            String message = conn.getHeaderField(RestConstants.OOZIE_ERROR_MESSAGE);
            assertEquals("E0701", error);
            assertEquals(true, message.contains("Cannot find the declaration of element 'workflow-app-invalid"));
            return null;
        }
    });
}
/**
 * Duplicate &lt;start&gt; element violates the workflow content model:
 * expects HTTP 400, error code E0701, and a cvc-complex-type.2.4.a message.
 */
public void testValidateWFNegative4() throws Exception {
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", "workflow.xml");
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            String xml = "<workflow-app xmlns=\"uri:oozie:workflow:0.3\" name=\"test\">\n" +
                    "    <start to=\"shell-1\"/>\n" +
                    "    <start to=\"shell-1\"/>\n" +
                    "    <action name=\"shell-1\">\n" +
                    "        <shell xmlns=\"uri:oozie:shell-action:0.3\">\n" +
                    "            <name-node>${nameNode}</name-node>\n" +
                    "            <exec>script-outstream.sh</exec>\n" +
                    "            <argument></argument>\n" +
                    "            <file>script-outstream.sh</file>\n" +
                    "            <capture-output/>\n" +
                    "        </shell>\n" +
                    "        <ok to=\"end\"/>\n" +
                    "        <error to=\"fail\"/>\n" +
                    "    </action>\n" +
                    "    <kill name=\"fail\">\n" +
                    "        <message>failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>\n" +
                    "    </kill>\n" +
                    "    <end name=\"end\"/>\n" +
                    "</workflow-app>";
            writeXML(conn.getOutputStream(), xml);
            assertEquals(HttpServletResponse.SC_BAD_REQUEST, conn.getResponseCode());
            String error = conn.getHeaderField(RestConstants.OOZIE_ERROR_CODE);
            String message = conn.getHeaderField(RestConstants.OOZIE_ERROR_MESSAGE);
            assertEquals("E0701", error);
            assertEquals(true, message.contains("cvc-complex-type.2.4.a: " +
                    "Invalid content was found starting with element 'start'"));
            return null;
        }
    });
}
/**
 * Writes an invalid workflow (undeclared element name-node2) to HDFS and
 * validates it by path: expects HTTP 400, error code E0701, and a message
 * naming the offending element.
 * Fixes: writer now closed via try-with-resources (was leaked on exception);
 * Writer.write(String) replaces the needless toCharArray() copy; close()
 * implies flush().
 */
public void testValidateWFonHDFSNegative() throws Exception {
    String xml = "<workflow-app xmlns=\"uri:oozie:workflow:0.3\" name=\"test\">\n" +
            "    <start to=\"shell-1\"/>\n" +
            "    <action name=\"shell-1\">\n" +
            "        <shell xmlns=\"uri:oozie:shell-action:0.3\">\n" +
            "            <name-node2>${nameNode}</name-node2>\n" +
            "            <exec>script-outstream.sh</exec>\n" +
            "            <argument></argument>\n" +
            "            <file>script-outstream.sh</file>\n" +
            "            <capture-output/>\n" +
            "        </shell>\n" +
            "        <ok to=\"end\"/>\n" +
            "        <error to=\"fail\"/>\n" +
            "    </action>\n" +
            "    <kill name=\"fail\">\n" +
            "        <message>failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>\n" +
            "    </kill>\n" +
            "    <end name=\"end\"/>\n" +
            "</workflow-app>";
    final Path path = new Path(getFsTestCaseDir(), "workflow.xml");
    try (OutputStreamWriter writer = new OutputStreamWriter(getFileSystem().create(path),
            StandardCharsets.UTF_8)) {
        writer.write(xml);
    }
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", path.toString());
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            assertEquals(HttpServletResponse.SC_BAD_REQUEST, conn.getResponseCode());
            String error = conn.getHeaderField(RestConstants.OOZIE_ERROR_CODE);
            String message = conn.getHeaderField(RestConstants.OOZIE_ERROR_MESSAGE);
            assertEquals("E0701", error);
            assertEquals(true, message.contains("Invalid content was found starting with element 'name-node2'"));
            return null;
        }
    });
}
/**
 * Schema-valid coordinator app (coordinator:0.1): expects HTTP 200.
 * NOTE(review): the servlet reports "Valid workflow-app" even for
 * coordinator documents — the assertion matches current behavior.
 */
public void testValidateCoordinator() throws Exception {
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", "coordinator.xml");
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            String xml = "<coordinator-app name=\"coord-simple\" frequency=\"${coord:minutes(1)}\"\n" +
                    "                 start=\"${startTime}\" end=\"${endTime}\"\n" +
                    "                 timezone=\"Asia/Seoul\"\n" +
                    "                 xmlns=\"uri:oozie:coordinator:0.1\">\n" +
                    "    <action>\n" +
                    "        <workflow>\n" +
                    "            <app-path>${nameNode}/user/seoeun/workflow-ndap/apps/v40/shell-outstream</app-path>\n" +
                    "        </workflow>\n" +
                    "    </action>\n" +
                    "</coordinator-app>";
            writeXML(conn.getOutputStream(), xml);
            assertEquals(HttpServletResponse.SC_OK, conn.getResponseCode());
            JSONObject obj = (JSONObject) JSONValue.parse(new InputStreamReader(conn.getInputStream(),
                    StandardCharsets.UTF_8));
            assertEquals("Valid workflow-app", obj.get(JsonTags.VALIDATE));
            return null;
        }
    });
}
/**
 * Coordinator with a nested workflow &lt;action&gt; where the schema forbids
 * it: expects HTTP 400, error code E0701, and a message naming 'action'.
 */
public void testValidateCoordinatorNegative1() throws Exception {
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", "coordinator.xml");
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            String xml = "<coordinator-app name=\"coord-simple\" frequency=\"${coord:minutes(1)}\"\n" +
                    "                 start=\"${startTime}\" end=\"${endTime}\"\n" +
                    "                 timezone=\"Asia/Seoul\"\n" +
                    "                 xmlns=\"uri:oozie:coordinator:0.1\">\n" +
                    "    <action>\n" +
                    "        <workflow>\n" +
                    "            <app-path>${nameNode}/user/seoeun/workflow-ndap/apps/v40/shell-outstream</app-path>\n" +
                    "            <action name=\"shell-1\">\n" +
                    "            <shell xmlns=\"uri:oozie:shell-action:0.3\">\n" +
                    "                <job-tracker>${jobTracker}</job-tracker>\n" +
                    "                <name-node>${nameNode}</name-node>\n" +
                    "                <exec>script-outstream.sh</exec>\n" +
                    "            </shell>\n" +
                    "            <ok to=\"end\"/>\n" +
                    "            <error to=\"fail\"/>\n" +
                    "            </action>\n" +
                    "        </workflow>\n" +
                    "    </action>\n" +
                    "</coordinator-app>";
            writeXML(conn.getOutputStream(), xml);
            assertEquals(HttpServletResponse.SC_BAD_REQUEST, conn.getResponseCode());
            String error = conn.getHeaderField(RestConstants.OOZIE_ERROR_CODE);
            String message = conn.getHeaderField(RestConstants.OOZIE_ERROR_MESSAGE);
            assertEquals("E0701", error);
            assertEquals(true, message.contains("Invalid content was found starting with element 'action'"));
            return null;
        }
    });
}
/**
 * Undeclared coordinator root element: expects HTTP 400, error code E0701,
 * and a "Cannot find the declaration" message.
 */
public void testValidateCoordinatorNegative2() throws Exception {
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", "coordinator.xml");
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            String xml = "<coordinator-app-invalid name=\"coord-simple\" frequency=\"${coord:minutes(1)}\"\n" +
                    "                 start=\"${startTime}\" end=\"${endTime}\"\n" +
                    "                 timezone=\"Asia/Seoul\"\n" +
                    "                 xmlns=\"uri:oozie:coordinator:0.1\">\n" +
                    "    <action>\n" +
                    "        <workflow>\n" +
                    "            <app-path>${nameNode}/user/seoeun/workflow-ndap/apps/v40/shell-outstream</app-path>\n" +
                    "        </workflow>\n" +
                    "    </action>\n" +
                    "</coordinator-app-invalid>";
            writeXML(conn.getOutputStream(), xml);
            assertEquals(HttpServletResponse.SC_BAD_REQUEST, conn.getResponseCode());
            String error = conn.getHeaderField(RestConstants.OOZIE_ERROR_CODE);
            String message = conn.getHeaderField(RestConstants.OOZIE_ERROR_MESSAGE);
            assertEquals("E0701", error);
            assertEquals(true, message.contains("Cannot find the declaration of element 'coordinator-app-invalid'"));
            return null;
        }
    });
}
/**
 * Schema-valid bundle app (bundle:0.1): expects HTTP 200.
 * NOTE(review): the servlet reports "Valid workflow-app" even for bundle
 * documents — the assertion matches current behavior.
 */
public void testValidateBundle() throws Exception {
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", "bundle.xml");
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            String xml = "<bundle-app name='test_bundle' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' "
                    + "xmlns='uri:oozie:bundle:0.1'> "
                    + "<controls> <kick-off-time>2009-02-02T00:00Z</kick-off-time> </controls> "
                    + "<coordinator name='c12'> "
                    + "<app-path>#app_path1</app-path>"
                    + "<configuration> "
                    + "<property> <name>START_TIME</name> <value>2009-02-01T00:00Z</value> </property> </configuration> "
                    + "</coordinator></bundle-app>";
            writeXML(conn.getOutputStream(), xml);
            assertEquals(HttpServletResponse.SC_OK, conn.getResponseCode());
            JSONObject obj = (JSONObject) JSONValue.parse(new InputStreamReader(conn.getInputStream(),
                    StandardCharsets.UTF_8));
            assertEquals("Valid workflow-app", obj.get(JsonTags.VALIDATE));
            return null;
        }
    });
}
/**
 * Bundle missing its mandatory &lt;coordinator&gt; child: expects HTTP 400,
 * error code E0701, and a cvc-complex-type.2.4.b incompleteness message.
 */
public void testValidateBundleNegative1() throws Exception {
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", "bundle.xml");
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            String xml = "<bundle-app name='test_bundle' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' "
                    + "xmlns='uri:oozie:bundle:0.1'> "
                    + "<controls> <kick-off-time>2009-02-02T00:00Z</kick-off-time> </controls> "
                    + "</bundle-app>";
            writeXML(conn.getOutputStream(), xml);
            assertEquals(HttpServletResponse.SC_BAD_REQUEST, conn.getResponseCode());
            String error = conn.getHeaderField(RestConstants.OOZIE_ERROR_CODE);
            String message = conn.getHeaderField(RestConstants.OOZIE_ERROR_MESSAGE);
            assertEquals("E0701", error);
            assertEquals(true, message.contains("cvc-complex-type.2.4.b: The content of element 'bundle-app' is not " +
                    "complete. One of '{\"uri:oozie:bundle:0.1\":coordinator}' is expected"));
            return null;
        }
    });
}
/**
 * Undeclared bundle root element: expects HTTP 400, error code E0701, and a
 * cvc-elt.1 "Cannot find the declaration" message (regex tolerates the
 * ".a" suffix emitted by newer Xerces versions).
 */
public void testValidateBundleNegative2() throws Exception {
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", "bundle.xml");
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            String xml = "<bundle-app-invalid name='test_bundle' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' "
                    + "xmlns='uri:oozie:bundle:0.1'> "
                    + "<controls> <kick-off-time>2009-02-02T00:00Z</kick-off-time> </controls> "
                    + "<coordinator name='c12'> "
                    + "<app-path>#app_path1</app-path>"
                    + "<configuration> "
                    + "<property> <name>START_TIME</name> <value>2009-02-01T00:00Z</value> </property> </configuration> "
                    + "</coordinator></bundle-app-invalid>";
            writeXML(conn.getOutputStream(), xml);
            assertEquals(HttpServletResponse.SC_BAD_REQUEST, conn.getResponseCode());
            String error = conn.getHeaderField(RestConstants.OOZIE_ERROR_CODE);
            String message = conn.getHeaderField(RestConstants.OOZIE_ERROR_MESSAGE);
            assertEquals("E0701", error);
            assertEquals(true, message.matches("^.*cvc-elt.1(.a)?: " +
                    "Cannot find the declaration of element 'bundle-app-invalid'.*$"));
            return null;
        }
    });
}
/**
 * Workflow (workflow:0.5) carrying a valid sla:0.2 &lt;sla:info&gt; block:
 * expects HTTP 200 with the JSON message "Valid workflow-app".
 */
public void testValidateSla() throws Exception {
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", "workflow.xml");
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            String xml = "<workflow-app xmlns=\"uri:oozie:workflow:0.5\" xmlns:sla=\"uri:oozie:sla:0.2\" name=\"test\">\n" +
                    "    <start to=\"shell-1\"/>\n" +
                    "    <action name=\"shell-1\">\n" +
                    "        <shell xmlns=\"uri:oozie:shell-action:0.3\">\n" +
                    "            <job-tracker>${jobTracker}</job-tracker>\n" +
                    "            <name-node>${nameNode}</name-node>\n" +
                    "            <exec>script-outstream.sh</exec>\n" +
                    "            <argument></argument>\n" +
                    "            <file>script-outstream.sh</file>\n" +
                    "            <capture-output/>\n" +
                    "        </shell>\n" +
                    "        <ok to=\"end\"/>\n" +
                    "        <error to=\"fail\"/>\n" +
                    "        <sla:info>\n" +
                    "            <sla:nominal-time>${nominal_time}</sla:nominal-time>\n" +
                    "            <sla:should-start>${10 * MINUTES}</sla:should-start>\n" +
                    "            <sla:should-end>${30 * MINUTES}</sla:should-end>\n" +
                    "            <sla:max-duration>${30 * MINUTES}</sla:max-duration>\n" +
                    "            <sla:alert-events>start_miss,end_met,end_miss</sla:alert-events>\n" +
                    "            <sla:alert-contact>joe@example.com</sla:alert-contact>\n" +
                    "        </sla:info>\n" +
                    "    </action>\n" +
                    "    <kill name=\"fail\">\n" +
                    "        <message>failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>\n" +
                    "    </kill>\n" +
                    "    <end name=\"end\"/>\n" +
                    "</workflow-app>";
            writeXML(conn.getOutputStream(), xml);
            assertEquals(HttpServletResponse.SC_OK, conn.getResponseCode());
            JSONObject obj = (JSONObject) JSONValue.parse(new InputStreamReader(conn.getInputStream(),
                    StandardCharsets.UTF_8));
            assertEquals("Valid workflow-app", obj.get(JsonTags.VALIDATE));
            return null;
        }
    });
}
/**
 * SLA block with sla:app-name where the sla:0.2 schema forbids it: expects
 * HTTP 400, error code E0701, and a message naming 'sla:app-name'.
 */
public void testValidateSlaNegative() throws Exception {
    runTest("/v2/validate", V2ValidateServlet.class, IS_SECURITY_ENABLED, new Callable<Void>() {
        public Void call() throws Exception {
            Map<String, String> params = new HashMap<String, String>();
            params.put("file", "workflow.xml");
            params.put("user", getTestUser());
            URL url = createURL("", params);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
            conn.setDoOutput(true);
            String xml = "<workflow-app xmlns=\"uri:oozie:workflow:0.5\" xmlns:sla=\"uri:oozie:sla:0.2\" name=\"test\">\n" +
                    "    <start to=\"shell-1\"/>\n" +
                    "    <action name=\"shell-1\">\n" +
                    "        <shell xmlns=\"uri:oozie:shell-action:0.3\">\n" +
                    "            <job-tracker>${jobTracker}</job-tracker>\n" +
                    "            <name-node>${nameNode}</name-node>\n" +
                    "            <exec>script-outstream.sh</exec>\n" +
                    "            <argument></argument>\n" +
                    "            <file>script-outstream.sh</file>\n" +
                    "            <capture-output/>\n" +
                    "        </shell>\n" +
                    "        <ok to=\"end\"/>\n" +
                    "        <error to=\"fail\"/>\n" +
                    "        <sla:info>\n" +
                    "            <sla:app-name>${nominal_time}</sla:app-name>\n" +
                    "            <sla:nominal-time>${nominal_time}</sla:nominal-time>\n" +
                    "            <sla:should-start>${10 * MINUTES}</sla:should-start>\n" +
                    "            <sla:should-end>${30 * MINUTES}</sla:should-end>\n" +
                    "            <sla:max-duration>${30 * MINUTES}</sla:max-duration>\n" +
                    "            <sla:alert-events>start_miss,end_met,end_miss</sla:alert-events>\n" +
                    "            <sla:alert-contact>joe@example.com</sla:alert-contact>\n" +
                    "        </sla:info>\n" +
                    "    </action>\n" +
                    "    <kill name=\"fail\">\n" +
                    "        <message>failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>\n" +
                    "    </kill>\n" +
                    "    <end name=\"end\"/>\n" +
                    "</workflow-app>";
            writeXML(conn.getOutputStream(), xml);
            assertEquals(HttpServletResponse.SC_BAD_REQUEST, conn.getResponseCode());
            String error = conn.getHeaderField(RestConstants.OOZIE_ERROR_CODE);
            String message = conn.getHeaderField(RestConstants.OOZIE_ERROR_MESSAGE);
            assertEquals("E0701", error);
            assertEquals(true, message.contains("Invalid content was found starting with element 'sla:app-name'"));
            return null;
        }
    });
}
/** Writes the XML request body as UTF-8 bytes (stream is not closed here). */
private void writeXML(OutputStream outputStream, String xml) throws IOException {
    outputStream.write(xml.getBytes(StandardCharsets.UTF_8));
}
}
|
apache/pinot | 38,415 | pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/BinaryOperatorTransformFunction.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.core.operator.transform.function;
import com.google.common.base.Preconditions;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import org.apache.pinot.common.function.TransformFunctionType;
import org.apache.pinot.common.request.context.ExpressionContext;
import org.apache.pinot.common.request.context.LiteralContext;
import org.apache.pinot.common.request.context.predicate.EqPredicate;
import org.apache.pinot.common.request.context.predicate.NotEqPredicate;
import org.apache.pinot.common.request.context.predicate.Predicate;
import org.apache.pinot.common.request.context.predicate.RangePredicate;
import org.apache.pinot.core.operator.ColumnContext;
import org.apache.pinot.core.operator.blocks.ValueBlock;
import org.apache.pinot.core.operator.filter.predicate.PredicateEvaluator;
import org.apache.pinot.core.operator.filter.predicate.PredicateEvaluatorProvider;
import org.apache.pinot.core.operator.transform.TransformResultMetadata;
import org.apache.pinot.core.query.optimizer.filter.NumericalFilterOptimizer;
import org.apache.pinot.segment.spi.index.reader.Dictionary;
import org.apache.pinot.spi.data.FieldSpec.DataType;
import org.apache.pinot.spi.utils.ByteArray;
/**
* <code>BinaryOperatorTransformFunction</code> abstracts common functions for binary operators (=, !=, >=, >, <=, <).
* The results are BOOLEAN type.
*
* TODO: Support MV columns
*/
public abstract class BinaryOperatorTransformFunction extends BaseTransformFunction {
private static final int EQUALS = 0;
private static final int GREATER_THAN_OR_EQUAL = 1;
private static final int GREATER_THAN = 2;
private static final int LESS_THAN = 3;
private static final int LESS_THAN_OR_EQUAL = 4;
private static final int NOT_EQUAL = 5;
private static final ExpressionContext LHS_PLACEHOLDER = ExpressionContext.forIdentifier("lhs");
protected final int _op;
protected final TransformFunctionType _transformFunctionType;
protected TransformFunction _leftTransformFunction;
protected TransformFunction _rightTransformFunction;
protected DataType _leftStoredType;
protected DataType _rightStoredType;
protected PredicateEvaluator _predicateEvaluator;
protected boolean _alwaysTrue;
protected boolean _alwaysFalse;
protected boolean _alwaysNull;
protected BinaryOperatorTransformFunction(TransformFunctionType transformFunctionType) {
// translate to integer in [0, 5] for guaranteed tableswitch
switch (transformFunctionType) {
case EQUALS:
_op = EQUALS;
break;
case GREATER_THAN_OR_EQUAL:
_op = GREATER_THAN_OR_EQUAL;
break;
case GREATER_THAN:
_op = GREATER_THAN;
break;
case LESS_THAN:
_op = LESS_THAN;
break;
case LESS_THAN_OR_EQUAL:
_op = LESS_THAN_OR_EQUAL;
break;
case NOT_EQUALS:
_op = NOT_EQUAL;
break;
default:
throw new IllegalArgumentException("non-binary transform function provided: " + transformFunctionType);
}
_transformFunctionType = transformFunctionType;
}
  /**
   * Returns the function name of the underlying {@link TransformFunctionType} (e.g. "equals").
   */
  @Override
  public String getName() {
    return _transformFunctionType.getName();
  }
@Override
public void init(List<TransformFunction> arguments, Map<String, ColumnContext> columnContextMap) {
super.init(arguments, columnContextMap);
// Check that there are exact 2 arguments
Preconditions.checkArgument(arguments.size() == 2,
"Exact 2 arguments are required for binary operator transform function");
_leftTransformFunction = arguments.get(0);
_rightTransformFunction = arguments.get(1);
DataType leftDataType = _leftTransformFunction.getResultMetadata().getDataType();
_leftStoredType = leftDataType.getStoredType();
_rightStoredType = _rightTransformFunction.getResultMetadata().getDataType().getStoredType();
// Data type check: left and right types should be compatible.
if (_leftStoredType == DataType.BYTES || _rightStoredType == DataType.BYTES) {
Preconditions.checkState(_leftStoredType == _rightStoredType, String.format(
"Unsupported data type for comparison: [Left Transform Function [%s] result type is [%s], Right Transform "
+ "Function [%s] result type is [%s]]", _leftTransformFunction.getName(), _leftStoredType,
_rightTransformFunction.getName(), _rightStoredType));
}
// Create predicate evaluator when the right side is a literal
if (_rightTransformFunction instanceof LiteralTransformFunction) {
_predicateEvaluator = createPredicateEvaluator(leftDataType, _leftTransformFunction.getDictionary(),
((LiteralTransformFunction) _rightTransformFunction).getLiteralContext());
}
}
  /**
   * Creates a predicate evaluator for the binary operator. Returns {@code null} when the binary operator always
   * evaluates to the same value (true/false/null) where the predicate evaluator is not needed.
   *
   * <p>Side effects: sets {@code _alwaysNull} when the right literal is null, and
   * {@link #createPredicate(DataType, LiteralContext)} may set {@code _alwaysTrue}/{@code _alwaysFalse}
   * when the literal is out of range for the left side's type.
   */
  @Nullable
  private PredicateEvaluator createPredicateEvaluator(DataType leftDataType, @Nullable Dictionary leftDictionary,
      LiteralContext rightLiteral) {
    if (rightLiteral.isNull()) {
      // Comparing with a null literal never matches; mark the result as constant.
      _alwaysNull = true;
      return null;
    }
    Predicate predicate = createPredicate(leftDataType, rightLiteral);
    if (predicate == null) {
      // createPredicate folded the comparison to a constant true/false.
      return null;
    }
    return PredicateEvaluatorProvider.getPredicateEvaluator(predicate, leftDictionary, leftDataType);
  }
  /**
   * Creates a predicate for the binary operator. Returns {@code null} when the binary operator always evaluates to the
   * same value (true/false/null) where the predicate is not needed.
   *
   * <p>For a numeric left side, the right literal is converted into the left side's type. The sign of
   * (actual - converted) is tracked so that lossy conversions can be compensated by adjusting the operator
   * (see {@link #getOperator(int)}) or folding the comparison to a constant.
   *
   * It might rewrite the right value similar to {@link NumericalFilterOptimizer}.
   * TODO: Extract the common logic.
   */
  @Nullable
  private Predicate createPredicate(DataType leftDataType, LiteralContext rightLiteral) {
    DataType rightDataType = rightLiteral.getType();
    if (!leftDataType.isNumeric()) {
      // Non-numeric left side: compare against the literal's string form with the literal's own type.
      return createPredicate(_op, rightLiteral.getStringValue(), rightDataType);
    }
    switch (rightDataType) {
      case LONG: {
        long actual = rightLiteral.getLongValue();
        switch (leftDataType) {
          case INT: {
            int converted = (int) actual;
            int comparison = Long.compare(actual, converted);
            // Set converted value to boundary value if overflow
            if (comparison != 0) {
              converted = comparison > 0 ? Integer.MAX_VALUE : Integer.MIN_VALUE;
            }
            return createIntPredicate(converted, comparison);
          }
          case FLOAT: {
            float converted = (float) actual;
            // Compare through BigDecimal to detect precision loss in the long -> float cast.
            int comparison = BigDecimal.valueOf(actual).compareTo(BigDecimal.valueOf(converted));
            return createFloatPredicate(converted, comparison);
          }
          case DOUBLE: {
            double converted = (double) actual;
            // Longs beyond 2^53 are not exactly representable as double; detect via BigDecimal.
            int comparison = BigDecimal.valueOf(actual).compareTo(BigDecimal.valueOf(converted));
            return createDoublePredicate(converted, comparison);
          }
          default:
            // LONG or BIG_DECIMAL left side represents the long literal exactly.
            return createPredicate(_op, Long.toString(actual), DataType.LONG);
        }
      }
      case FLOAT: {
        float actual = rightLiteral.getFloatValue();
        switch (leftDataType) {
          case INT: {
            int converted = (int) actual;
            int comparison = Double.compare(actual, converted);
            return createIntPredicate(converted, comparison);
          }
          case LONG: {
            long converted = (long) actual;
            int comparison = BigDecimal.valueOf(actual).compareTo(BigDecimal.valueOf(converted));
            return createLongPredicate(converted, comparison);
          }
          default:
            return createPredicate(_op, Float.toString(actual), DataType.FLOAT);
        }
      }
      case DOUBLE: {
        double actual = rightLiteral.getDoubleValue();
        switch (leftDataType) {
          case INT: {
            int converted = (int) actual;
            int comparison = Double.compare(actual, converted);
            return createIntPredicate(converted, comparison);
          }
          case LONG: {
            long converted = (long) actual;
            int comparison = BigDecimal.valueOf(actual).compareTo(BigDecimal.valueOf(converted));
            return createLongPredicate(converted, comparison);
          }
          case FLOAT: {
            float converted = (float) actual;
            int comparison = Double.compare(actual, converted);
            return createFloatPredicate(converted, comparison);
          }
          default:
            return createPredicate(_op, Double.toString(actual), DataType.DOUBLE);
        }
      }
      case BIG_DECIMAL:
      // STRING literals are parsed as BigDecimal for comparison against a numeric left side.
      case STRING: {
        BigDecimal actual = rightLiteral.getBigDecimalValue();
        switch (leftDataType) {
          case INT: {
            int converted = actual.intValue();
            int comparison = actual.compareTo(BigDecimal.valueOf(converted));
            // Set converted value to boundary value if overflow
            if (comparison != 0) {
              if (actual.compareTo(BigDecimal.valueOf(Integer.MAX_VALUE)) > 0) {
                converted = Integer.MAX_VALUE;
              } else if (actual.compareTo(BigDecimal.valueOf(Integer.MIN_VALUE)) < 0) {
                converted = Integer.MIN_VALUE;
              }
            }
            return createIntPredicate(converted, comparison);
          }
          case LONG: {
            long converted = actual.longValue();
            int comparison = actual.compareTo(BigDecimal.valueOf(converted));
            // Set converted value to boundary value if overflow
            if (comparison != 0) {
              if (actual.compareTo(BigDecimal.valueOf(Long.MAX_VALUE)) > 0) {
                converted = Long.MAX_VALUE;
              } else if (actual.compareTo(BigDecimal.valueOf(Long.MIN_VALUE)) < 0) {
                converted = Long.MIN_VALUE;
              }
            }
            return createLongPredicate(converted, comparison);
          }
          case FLOAT: {
            float converted = actual.floatValue();
            int comparison = actual.compareTo(BigDecimal.valueOf(converted));
            return createFloatPredicate(converted, comparison);
          }
          case DOUBLE: {
            double converted = actual.doubleValue();
            int comparison = actual.compareTo(BigDecimal.valueOf(converted));
            return createDoublePredicate(converted, comparison);
          }
          default:
            return createPredicate(_op, actual.toString(), DataType.BIG_DECIMAL);
        }
      }
      default:
        return createPredicate(_op, rightLiteral.getStringValue(), rightDataType);
    }
  }
private Predicate createPredicate(int operator, String value, DataType dataType) {
switch (operator) {
case EQUALS:
return new EqPredicate(LHS_PLACEHOLDER, value);
case NOT_EQUAL:
return new NotEqPredicate(LHS_PLACEHOLDER, value);
case GREATER_THAN:
return new RangePredicate(LHS_PLACEHOLDER, false, value, false, RangePredicate.UNBOUNDED, dataType);
case GREATER_THAN_OR_EQUAL:
return new RangePredicate(LHS_PLACEHOLDER, true, value, false, RangePredicate.UNBOUNDED, dataType);
case LESS_THAN:
return new RangePredicate(LHS_PLACEHOLDER, false, RangePredicate.UNBOUNDED, false, value, dataType);
case LESS_THAN_OR_EQUAL:
return new RangePredicate(LHS_PLACEHOLDER, false, RangePredicate.UNBOUNDED, true, value, dataType);
default:
throw new IllegalStateException();
}
}
  /**
   * Builds an INT predicate from the converted literal. {@code comparison} is the sign of
   * (actual literal - converted literal); non-zero means the conversion was lossy, in which case the comparison
   * either folds to a constant (sets {@code _alwaysTrue}/{@code _alwaysFalse} and returns null) or uses the
   * operator adjusted by {@link #getOperator(int)}.
   */
  @Nullable
  private Predicate createIntPredicate(int converted, int comparison) {
    if (comparison == 0) {
      // Exact conversion: keep the operator as-is.
      return createPredicate(_op, Integer.toString(converted), DataType.INT);
    }
    switch (_op) {
      case EQUALS:
        // Literal is not representable as int, so no int value can equal it.
        _alwaysFalse = true;
        return null;
      case NOT_EQUAL:
        _alwaysTrue = true;
        return null;
      case GREATER_THAN:
      case GREATER_THAN_OR_EQUAL:
        if (comparison > 0 && converted == Integer.MAX_VALUE) {
          // col > Integer.MAX_VALUE
          _alwaysFalse = true;
          return null;
        } else if (comparison < 0 && converted == Integer.MIN_VALUE) {
          // col >= Integer.MIN_VALUE
          _alwaysTrue = true;
          return null;
        } else {
          return createPredicate(getOperator(comparison), Integer.toString(converted), DataType.INT);
        }
      case LESS_THAN:
      case LESS_THAN_OR_EQUAL:
        if (comparison > 0 && converted == Integer.MAX_VALUE) {
          // col <= Integer.MAX_VALUE
          _alwaysTrue = true;
          return null;
        } else if (comparison < 0 && converted == Integer.MIN_VALUE) {
          // col < Integer.MIN_VALUE
          _alwaysFalse = true;
          return null;
        } else {
          return createPredicate(getOperator(comparison), Integer.toString(converted), DataType.INT);
        }
      default:
        throw new IllegalStateException();
    }
  }

  /**
   * LONG counterpart of {@link #createIntPredicate(int, int)}: folds lossy conversions at the long boundaries to
   * constants, otherwise rewrites the operator for the inexact converted value.
   */
  @Nullable
  private Predicate createLongPredicate(long converted, int comparison) {
    if (comparison == 0) {
      return createPredicate(_op, Long.toString(converted), DataType.LONG);
    }
    switch (_op) {
      case EQUALS:
        _alwaysFalse = true;
        return null;
      case NOT_EQUAL:
        _alwaysTrue = true;
        return null;
      case GREATER_THAN:
      case GREATER_THAN_OR_EQUAL:
        if (comparison > 0 && converted == Long.MAX_VALUE) {
          // col > Long.MAX_VALUE
          _alwaysFalse = true;
          return null;
        } else if (comparison < 0 && converted == Long.MIN_VALUE) {
          // col >= Long.MIN_VALUE
          _alwaysTrue = true;
          return null;
        } else {
          return createPredicate(getOperator(comparison), Long.toString(converted), DataType.LONG);
        }
      case LESS_THAN:
      case LESS_THAN_OR_EQUAL:
        if (comparison > 0 && converted == Long.MAX_VALUE) {
          // col <= Long.MAX_VALUE
          _alwaysTrue = true;
          return null;
        } else if (comparison < 0 && converted == Long.MIN_VALUE) {
          // col < Long.MIN_VALUE
          _alwaysFalse = true;
          return null;
        } else {
          return createPredicate(getOperator(comparison), Long.toString(converted), DataType.LONG);
        }
      default:
        throw new IllegalStateException();
    }
  }

  /**
   * FLOAT counterpart: only equality folds to a constant on lossy conversion; ordering operators are rewritten
   * via {@link #getOperator(int)} (no boundary folding is performed for floating point).
   */
  @Nullable
  private Predicate createFloatPredicate(float converted, int comparison) {
    if (comparison == 0) {
      return createPredicate(_op, Float.toString(converted), DataType.FLOAT);
    }
    switch (_op) {
      case EQUALS:
        _alwaysFalse = true;
        return null;
      case NOT_EQUAL:
        _alwaysTrue = true;
        return null;
      default:
        return createPredicate(getOperator(comparison), Float.toString(converted), DataType.FLOAT);
    }
  }

  /**
   * DOUBLE counterpart: same constant-folding rules as {@link #createFloatPredicate(float, int)}.
   */
  @Nullable
  private Predicate createDoublePredicate(double converted, int comparison) {
    if (comparison == 0) {
      return createPredicate(_op, Double.toString(converted), DataType.DOUBLE);
    }
    switch (_op) {
      case EQUALS:
        _alwaysFalse = true;
        return null;
      case NOT_EQUAL:
        _alwaysTrue = true;
        return null;
      default:
        return createPredicate(getOperator(comparison), Double.toString(converted), DataType.DOUBLE);
    }
  }
/**
* Returns the operator (int value) for the given comparison result of actual value and converted value.
*/
private int getOperator(int comparison) {
assert comparison != 0;
if (comparison > 0) {
// Actual value greater than converted value
// col >= actual -> col > converted
// col < actual -> col <= converted
if (_op == GREATER_THAN_OR_EQUAL) {
return GREATER_THAN;
}
if (_op == LESS_THAN) {
return LESS_THAN_OR_EQUAL;
}
} else {
// Actual value less than converted value
// col > actual -> col >= converted
// col <= actual -> col < converted
if (_op == GREATER_THAN) {
return GREATER_THAN_OR_EQUAL;
}
if (_op == LESS_THAN_OR_EQUAL) {
return LESS_THAN;
}
}
return _op;
}
  /**
   * The result of a binary comparison is always a single-value BOOLEAN column.
   */
  @Override
  public TransformResultMetadata getResultMetadata() {
    return BOOLEAN_SV_NO_DICTIONARY_METADATA;
  }
  /**
   * Evaluates the comparison for every doc in the block, returning 1 (true) / 0 (false) per row.
   *
   * <p>Fast paths in order: (1) constant results pre-computed in init() when the right side was a literal
   * (note: a constant-null comparison is materialized as all 0s here); (2) a pre-built predicate evaluator,
   * applied on dictionary ids when possible; (3) generic row-by-row comparison dispatched on the left
   * stored type.
   */
  @Override
  public int[] transformToIntValuesSV(ValueBlock valueBlock) {
    int length = valueBlock.getNumDocs();
    initIntValuesSV(length);
    if (_alwaysTrue) {
      Arrays.fill(_intValuesSV, 0, length, 1);
      return _intValuesSV;
    }
    if (_alwaysFalse || _alwaysNull) {
      Arrays.fill(_intValuesSV, 0, length, 0);
      return _intValuesSV;
    }
    if (_predicateEvaluator != null) {
      // Literal right side: apply the pre-built predicate to the left values only.
      if (_predicateEvaluator.isDictionaryBased()) {
        int[] dictIds = _leftTransformFunction.transformToDictIdsSV(valueBlock);
        for (int i = 0; i < length; i++) {
          _intValuesSV[i] = _predicateEvaluator.applySV(dictIds[i]) ? 1 : 0;
        }
      } else {
        switch (_leftStoredType) {
          case INT:
            int[] intValues = _leftTransformFunction.transformToIntValuesSV(valueBlock);
            for (int i = 0; i < length; i++) {
              _intValuesSV[i] = _predicateEvaluator.applySV(intValues[i]) ? 1 : 0;
            }
            break;
          case LONG:
            long[] longValues = _leftTransformFunction.transformToLongValuesSV(valueBlock);
            for (int i = 0; i < length; i++) {
              _intValuesSV[i] = _predicateEvaluator.applySV(longValues[i]) ? 1 : 0;
            }
            break;
          case FLOAT:
            float[] floatValues = _leftTransformFunction.transformToFloatValuesSV(valueBlock);
            for (int i = 0; i < length; i++) {
              _intValuesSV[i] = _predicateEvaluator.applySV(floatValues[i]) ? 1 : 0;
            }
            break;
          case DOUBLE:
            double[] doubleValues = _leftTransformFunction.transformToDoubleValuesSV(valueBlock);
            for (int i = 0; i < length; i++) {
              _intValuesSV[i] = _predicateEvaluator.applySV(doubleValues[i]) ? 1 : 0;
            }
            break;
          case BIG_DECIMAL:
            BigDecimal[] bigDecimalValues = _leftTransformFunction.transformToBigDecimalValuesSV(valueBlock);
            for (int i = 0; i < length; i++) {
              _intValuesSV[i] = _predicateEvaluator.applySV(bigDecimalValues[i]) ? 1 : 0;
            }
            break;
          case STRING:
            String[] stringValues = _leftTransformFunction.transformToStringValuesSV(valueBlock);
            for (int i = 0; i < length; i++) {
              _intValuesSV[i] = _predicateEvaluator.applySV(stringValues[i]) ? 1 : 0;
            }
            break;
          case BYTES:
            byte[][] bytesValues = _leftTransformFunction.transformToBytesValuesSV(valueBlock);
            for (int i = 0; i < length; i++) {
              _intValuesSV[i] = _predicateEvaluator.applySV(bytesValues[i]) ? 1 : 0;
            }
            break;
          case UNKNOWN:
            fillResultUnknown(length);
            break;
          default:
            throw illegalState();
        }
      }
    } else {
      // Non-literal right side: compare both sides row by row.
      switch (_leftStoredType) {
        case INT:
          fillResultInt(valueBlock, length);
          break;
        case LONG:
          fillResultLong(valueBlock, length);
          break;
        case FLOAT:
          fillResultFloat(valueBlock, length);
          break;
        case DOUBLE:
          fillResultDouble(valueBlock, length);
          break;
        case BIG_DECIMAL:
          fillResultBigDecimal(valueBlock, length);
          break;
        case STRING:
          fillResultString(valueBlock, length);
          break;
        case BYTES:
          fillResultBytes(valueBlock, length);
          break;
        case UNKNOWN:
          fillResultUnknown(length);
          break;
        default:
          throw illegalState();
      }
    }
    return _intValuesSV;
  }
  /** Row-by-row comparison with an INT left side, dispatching on the right side's stored type. */
  private void fillResultInt(ValueBlock valueBlock, int length) {
    int[] leftIntValues = _leftTransformFunction.transformToIntValuesSV(valueBlock);
    switch (_rightStoredType) {
      case INT:
        fillIntResultArray(valueBlock, leftIntValues, length);
        break;
      case LONG:
        fillLongResultArray(valueBlock, leftIntValues, length);
        break;
      case FLOAT:
        fillFloatResultArray(valueBlock, leftIntValues, length);
        break;
      case DOUBLE:
        fillDoubleResultArray(valueBlock, leftIntValues, length);
        break;
      case BIG_DECIMAL:
        fillBigDecimalResultArray(valueBlock, leftIntValues, length);
        break;
      case STRING:
        fillStringResultArray(valueBlock, leftIntValues, length);
        break;
      case UNKNOWN:
        fillResultUnknown(length);
        break;
      default:
        throw illegalState();
    }
  }

  /** Row-by-row comparison with a LONG left side, dispatching on the right side's stored type. */
  private void fillResultLong(ValueBlock valueBlock, int length) {
    long[] leftLongValues = _leftTransformFunction.transformToLongValuesSV(valueBlock);
    switch (_rightStoredType) {
      case INT:
        fillIntResultArray(valueBlock, leftLongValues, length);
        break;
      case LONG:
        fillLongResultArray(valueBlock, leftLongValues, length);
        break;
      case FLOAT:
        fillFloatResultArray(valueBlock, leftLongValues, length);
        break;
      case DOUBLE:
        fillDoubleResultArray(valueBlock, leftLongValues, length);
        break;
      case BIG_DECIMAL:
        fillBigDecimalResultArray(valueBlock, leftLongValues, length);
        break;
      case STRING:
        fillStringResultArray(valueBlock, leftLongValues, length);
        break;
      case UNKNOWN:
        fillResultUnknown(length);
        break;
      default:
        throw illegalState();
    }
  }

  /** Row-by-row comparison with a FLOAT left side, dispatching on the right side's stored type. */
  private void fillResultFloat(ValueBlock valueBlock, int length) {
    float[] leftFloatValues = _leftTransformFunction.transformToFloatValuesSV(valueBlock);
    switch (_rightStoredType) {
      case INT:
        fillIntResultArray(valueBlock, leftFloatValues, length);
        break;
      case LONG:
        fillLongResultArray(valueBlock, leftFloatValues, length);
        break;
      case FLOAT:
        fillFloatResultArray(valueBlock, leftFloatValues, length);
        break;
      case DOUBLE:
        fillDoubleResultArray(valueBlock, leftFloatValues, length);
        break;
      case BIG_DECIMAL:
        fillBigDecimalResultArray(valueBlock, leftFloatValues, length);
        break;
      case STRING:
        fillStringResultArray(valueBlock, leftFloatValues, length);
        break;
      case UNKNOWN:
        fillResultUnknown(length);
        break;
      default:
        throw illegalState();
    }
  }

  /** Row-by-row comparison with a DOUBLE left side, dispatching on the right side's stored type. */
  private void fillResultDouble(ValueBlock valueBlock, int length) {
    double[] leftDoubleValues = _leftTransformFunction.transformToDoubleValuesSV(valueBlock);
    switch (_rightStoredType) {
      case INT:
        fillIntResultArray(valueBlock, leftDoubleValues, length);
        break;
      case LONG:
        fillLongResultArray(valueBlock, leftDoubleValues, length);
        break;
      case FLOAT:
        fillFloatResultArray(valueBlock, leftDoubleValues, length);
        break;
      case DOUBLE:
        fillDoubleResultArray(valueBlock, leftDoubleValues, length);
        break;
      case BIG_DECIMAL:
        fillBigDecimalResultArray(valueBlock, leftDoubleValues, length);
        break;
      case STRING:
        fillStringResultArray(valueBlock, leftDoubleValues, length);
        break;
      case UNKNOWN:
        fillResultUnknown(length);
        break;
      default:
        throw illegalState();
    }
  }

  /** Row-by-row comparison with a BIG_DECIMAL left side, dispatching on the right side's stored type. */
  private void fillResultBigDecimal(ValueBlock valueBlock, int length) {
    BigDecimal[] leftBigDecimalValues = _leftTransformFunction.transformToBigDecimalValuesSV(valueBlock);
    switch (_rightStoredType) {
      case INT:
        fillIntResultArray(valueBlock, leftBigDecimalValues, length);
        break;
      case LONG:
        fillLongResultArray(valueBlock, leftBigDecimalValues, length);
        break;
      case FLOAT:
        fillFloatResultArray(valueBlock, leftBigDecimalValues, length);
        break;
      case DOUBLE:
        fillDoubleResultArray(valueBlock, leftBigDecimalValues, length);
        break;
      case STRING:
        fillStringResultArray(valueBlock, leftBigDecimalValues, length);
        break;
      case BIG_DECIMAL:
        fillBigDecimalResultArray(valueBlock, leftBigDecimalValues, length);
        break;
      case UNKNOWN:
        fillResultUnknown(length);
        break;
      default:
        throw illegalState();
    }
  }
private IllegalStateException illegalState() {
throw new IllegalStateException(String.format(
"Unsupported data type for comparison: [Left Transform Function [%s] result type is [%s], Right "
+ "Transform Function [%s] result type is [%s]]", _leftTransformFunction.getName(), _leftStoredType,
_rightTransformFunction.getName(), _rightStoredType));
}
private void fillResultString(ValueBlock valueBlock, int length) {
String[] leftStringValues = _leftTransformFunction.transformToStringValuesSV(valueBlock);
String[] rightStringValues = _rightTransformFunction.transformToStringValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(leftStringValues[i].compareTo(rightStringValues[i]));
}
}
private void fillResultBytes(ValueBlock valueBlock, int length) {
byte[][] leftBytesValues = _leftTransformFunction.transformToBytesValuesSV(valueBlock);
byte[][] rightBytesValues = _rightTransformFunction.transformToBytesValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult((ByteArray.compare(leftBytesValues[i], rightBytesValues[i])));
}
}
private void fillIntResultArray(ValueBlock valueBlock, int[] leftIntValues, int length) {
int[] rightIntValues = _rightTransformFunction.transformToIntValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(Integer.compare(leftIntValues[i], rightIntValues[i]));
}
}
private void fillLongResultArray(ValueBlock valueBlock, int[] leftValues, int length) {
long[] rightValues = _rightTransformFunction.transformToLongValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(Long.compare(leftValues[i], rightValues[i]));
}
}
private void fillFloatResultArray(ValueBlock valueBlock, int[] leftValues, int length) {
float[] rightFloatValues = _rightTransformFunction.transformToFloatValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(Double.compare(leftValues[i], rightFloatValues[i]));
}
}
private void fillDoubleResultArray(ValueBlock valueBlock, int[] leftValues, int length) {
double[] rightDoubleValues = _rightTransformFunction.transformToDoubleValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(Double.compare(leftValues[i], rightDoubleValues[i]));
}
}
private void fillBigDecimalResultArray(ValueBlock valueBlock, int[] leftValues, int length) {
BigDecimal[] rightBigDecimalValues = _rightTransformFunction.transformToBigDecimalValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(BigDecimal.valueOf(leftValues[i]).compareTo(rightBigDecimalValues[i]));
}
}
private void fillStringResultArray(ValueBlock valueBlock, int[] leftValues, int length) {
String[] rightStringValues = _rightTransformFunction.transformToStringValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
try {
_intValuesSV[i] =
getIntResult(BigDecimal.valueOf(leftValues[i]).compareTo(new BigDecimal(rightStringValues[i])));
} catch (NumberFormatException e) {
_intValuesSV[i] = 0;
}
}
}
private void fillIntResultArray(ValueBlock valueBlock, long[] leftIntValues, int length) {
int[] rightIntValues = _rightTransformFunction.transformToIntValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(Long.compare(leftIntValues[i], rightIntValues[i]));
}
}
private void fillLongResultArray(ValueBlock valueBlock, long[] leftValues, int length) {
long[] rightValues = _rightTransformFunction.transformToLongValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(Long.compare(leftValues[i], rightValues[i]));
}
}
private void fillFloatResultArray(ValueBlock valueBlock, long[] leftValues, int length) {
float[] rightFloatValues = _rightTransformFunction.transformToFloatValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(compare(leftValues[i], rightFloatValues[i]));
}
}
private void fillDoubleResultArray(ValueBlock valueBlock, long[] leftValues, int length) {
double[] rightDoubleValues = _rightTransformFunction.transformToDoubleValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(compare(leftValues[i], rightDoubleValues[i]));
}
}
private void fillBigDecimalResultArray(ValueBlock valueBlock, long[] leftValues, int length) {
BigDecimal[] rightBigDecimalValues = _rightTransformFunction.transformToBigDecimalValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(BigDecimal.valueOf(leftValues[i]).compareTo(rightBigDecimalValues[i]));
}
}
private void fillStringResultArray(ValueBlock valueBlock, long[] leftValues, int length) {
String[] rightStringValues = _rightTransformFunction.transformToStringValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
try {
_intValuesSV[i] =
getIntResult(BigDecimal.valueOf(leftValues[i]).compareTo(new BigDecimal(rightStringValues[i])));
} catch (NumberFormatException e) {
_intValuesSV[i] = 0;
}
}
}
private void fillIntResultArray(ValueBlock valueBlock, float[] leftValues, int length) {
int[] rightIntValues = _rightTransformFunction.transformToIntValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(Double.compare(leftValues[i], rightIntValues[i]));
}
}
private void fillLongResultArray(ValueBlock valueBlock, float[] leftValues, int length) {
long[] rightValues = _rightTransformFunction.transformToLongValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(compare(leftValues[i], rightValues[i]));
}
}
private void fillFloatResultArray(ValueBlock valueBlock, float[] leftValues, int length) {
float[] rightFloatValues = _rightTransformFunction.transformToFloatValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(Float.compare(leftValues[i], rightFloatValues[i]));
}
}
private void fillDoubleResultArray(ValueBlock valueBlock, float[] leftValues, int length) {
double[] rightDoubleValues = _rightTransformFunction.transformToDoubleValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(Double.compare(leftValues[i], rightDoubleValues[i]));
}
}
private void fillBigDecimalResultArray(ValueBlock valueBlock, float[] leftValues, int length) {
BigDecimal[] rightBigDecimalValues = _rightTransformFunction.transformToBigDecimalValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(BigDecimal.valueOf(leftValues[i]).compareTo(rightBigDecimalValues[i]));
}
}
private void fillStringResultArray(ValueBlock valueBlock, float[] leftValues, int length) {
String[] rightStringValues = _rightTransformFunction.transformToStringValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
try {
_intValuesSV[i] =
getIntResult(BigDecimal.valueOf(leftValues[i]).compareTo(new BigDecimal(rightStringValues[i])));
} catch (NumberFormatException e) {
_intValuesSV[i] = 0;
}
}
}
private void fillIntResultArray(ValueBlock valueBlock, double[] leftValues, int length) {
int[] rightIntValues = _rightTransformFunction.transformToIntValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(Double.compare(leftValues[i], rightIntValues[i]));
}
}
private void fillLongResultArray(ValueBlock valueBlock, double[] leftValues, int length) {
long[] rightValues = _rightTransformFunction.transformToLongValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(compare(leftValues[i], rightValues[i]));
}
}
private void fillFloatResultArray(ValueBlock valueBlock, double[] leftValues, int length) {
float[] rightFloatValues = _rightTransformFunction.transformToFloatValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(Double.compare(leftValues[i], rightFloatValues[i]));
}
}
private void fillDoubleResultArray(ValueBlock valueBlock, double[] leftValues, int length) {
double[] rightDoubleValues = _rightTransformFunction.transformToDoubleValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(Double.compare(leftValues[i], rightDoubleValues[i]));
}
}
private void fillBigDecimalResultArray(ValueBlock valueBlock, double[] leftValues, int length) {
BigDecimal[] rightBigDecimalValues = _rightTransformFunction.transformToBigDecimalValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(BigDecimal.valueOf(leftValues[i]).compareTo(rightBigDecimalValues[i]));
}
}
private void fillStringResultArray(ValueBlock valueBlock, double[] leftValues, int length) {
String[] rightStringValues = _rightTransformFunction.transformToStringValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
try {
_intValuesSV[i] =
getIntResult(BigDecimal.valueOf(leftValues[i]).compareTo(new BigDecimal(rightStringValues[i])));
} catch (NumberFormatException e) {
_intValuesSV[i] = 0;
}
}
}
private void fillIntResultArray(ValueBlock valueBlock, BigDecimal[] leftValues, int length) {
int[] rightIntValues = _rightTransformFunction.transformToIntValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(leftValues[i].compareTo(BigDecimal.valueOf(rightIntValues[i])));
}
}
private void fillLongResultArray(ValueBlock valueBlock, BigDecimal[] leftValues, int length) {
long[] rightLongValues = _rightTransformFunction.transformToLongValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(leftValues[i].compareTo(BigDecimal.valueOf(rightLongValues[i])));
}
}
private void fillFloatResultArray(ValueBlock valueBlock, BigDecimal[] leftValues, int length) {
float[] rightFloatValues = _rightTransformFunction.transformToFloatValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(leftValues[i].compareTo(BigDecimal.valueOf(rightFloatValues[i])));
}
}
private void fillDoubleResultArray(ValueBlock valueBlock, BigDecimal[] leftValues, int length) {
double[] rightDoubleValues = _rightTransformFunction.transformToDoubleValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(leftValues[i].compareTo(BigDecimal.valueOf(rightDoubleValues[i])));
}
}
private void fillBigDecimalResultArray(ValueBlock valueBlock, BigDecimal[] leftValues, int length) {
BigDecimal[] rightBigDecimalValues = _rightTransformFunction.transformToBigDecimalValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(leftValues[i].compareTo(rightBigDecimalValues[i]));
}
}
private void fillStringResultArray(ValueBlock valueBlock, BigDecimal[] leftValues, int length) {
String[] rightStringValues = _rightTransformFunction.transformToStringValuesSV(valueBlock);
for (int i = 0; i < length; i++) {
_intValuesSV[i] = getIntResult(leftValues[i].compareTo(new BigDecimal(rightStringValues[i])));
}
}
private int compare(long left, double right) {
if (Math.abs(left) <= 1L << 53) {
return Double.compare(left, right);
} else {
return BigDecimal.valueOf(left).compareTo(BigDecimal.valueOf(right));
}
}
private int compare(double left, long right) {
if (Math.abs(right) <= 1L << 53) {
return Double.compare(left, right);
} else {
return BigDecimal.valueOf(left).compareTo(BigDecimal.valueOf(right));
}
}
private int getIntResult(int comparisonResult) {
return getBinaryFuncResult(comparisonResult) ? 1 : 0;
}
private boolean getBinaryFuncResult(int comparisonResult) {
switch (_op) {
case EQUALS:
return comparisonResult == 0;
case GREATER_THAN_OR_EQUAL:
return comparisonResult >= 0;
case GREATER_THAN:
return comparisonResult > 0;
case LESS_THAN:
return comparisonResult < 0;
case LESS_THAN_OR_EQUAL:
return comparisonResult <= 0;
case NOT_EQUAL:
return comparisonResult != 0;
default:
throw new IllegalStateException();
}
}
}
|
apache/druid | 38,320 | processing/src/test/java/org/apache/druid/timeline/partition/OvershadowableManagerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.timeline.partition;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.timeline.partition.OvershadowableManager.RootPartitionRange;
import org.apache.druid.timeline.partition.OvershadowableManager.State;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class OvershadowableManagerTest
{
private static final String MAJOR_VERSION = "version";
@Rule
public ExpectedException expectedException = ExpectedException.none();
private OvershadowableManager<OvershadowableInteger> manager;
private int nextRootPartitionId;
private int nextNonRootPartitionId;
private List<PartitionChunk<OvershadowableInteger>> expectedVisibleChunks;
private List<PartitionChunk<OvershadowableInteger>> expectedOvershadowedChunks;
private List<PartitionChunk<OvershadowableInteger>> expectedStandbyChunks;
@Before
public void setup()
{
manager = new OvershadowableManager<>();
nextRootPartitionId = PartitionIds.ROOT_GEN_START_PARTITION_ID;
nextNonRootPartitionId = PartitionIds.NON_ROOT_GEN_START_PARTITION_ID;
expectedVisibleChunks = new ArrayList<>();
expectedOvershadowedChunks = new ArrayList<>();
expectedStandbyChunks = new ArrayList<>();
}
@Test
public void testCopyVisible()
{
// chunks of partition id 0 and 1
manager.addChunk(newRootChunk());
manager.addChunk(newRootChunk());
// chunks to overshadow the partition id range [0, 2)
manager.addChunk(newNonRootChunk(0, 2, 1, 3));
manager.addChunk(newNonRootChunk(0, 2, 1, 3));
manager.addChunk(newNonRootChunk(0, 2, 1, 3));
// chunks of partition id 3 and 4
manager.addChunk(newRootChunk());
manager.addChunk(newRootChunk());
// standby chunk
manager.addChunk(newNonRootChunk(2, 4, 1, 3));
OvershadowableManager<OvershadowableInteger> copy = OvershadowableManager.copyVisible(manager);
Assert.assertTrue(copy.getOvershadowedChunks().isEmpty());
Assert.assertTrue(copy.getStandbyChunks().isEmpty());
Assert.assertEquals(
Lists.newArrayList(manager.visibleChunksIterator()),
Lists.newArrayList(copy.visibleChunksIterator())
);
}
@Test
public void testDeepCopy()
{
// chunks of partition id 0 and 1
manager.addChunk(newRootChunk());
manager.addChunk(newRootChunk());
// chunks to overshadow the partition id range [0, 2)
manager.addChunk(newNonRootChunk(0, 2, 1, 3));
manager.addChunk(newNonRootChunk(0, 2, 1, 3));
manager.addChunk(newNonRootChunk(0, 2, 1, 3));
// chunks of partition id 3 and 4
manager.addChunk(newRootChunk());
manager.addChunk(newRootChunk());
// standby chunk
manager.addChunk(newNonRootChunk(2, 4, 1, 3));
OvershadowableManager<OvershadowableInteger> copy = OvershadowableManager.deepCopy(manager);
Assert.assertEquals(manager, copy);
}
@Test
public void testEqualAndHashCodeContract()
{
EqualsVerifier.forClass(OvershadowableManager.class).usingGetClass().verify();
}
@Test
public void testFindOvershadowedBy()
{
final List<PartitionChunk<OvershadowableInteger>> expectedOvershadowedChunks = new ArrayList<>();
// All chunks except the last one are in the overshadowed state
PartitionChunk<OvershadowableInteger> chunk = newNonRootChunk(0, 2, 1, 1);
manager.addChunk(chunk);
chunk = newNonRootChunk(0, 3, 2, 1);
manager.addChunk(chunk);
chunk = newNonRootChunk(0, 5, 3, 1);
manager.addChunk(chunk);
chunk = newNonRootChunk(5, 8, 1, 1);
expectedOvershadowedChunks.add(chunk);
manager.addChunk(chunk);
chunk = newNonRootChunk(8, 11, 2, 1);
manager.addChunk(chunk);
chunk = newNonRootChunk(5, 11, 3, 1);
manager.addChunk(chunk);
chunk = newNonRootChunk(0, 12, 5, 1);
manager.addChunk(chunk);
List<AtomicUpdateGroup<OvershadowableInteger>> overshadowedGroups = manager.findOvershadowedBy(
RootPartitionRange.of(2, 10),
(short) 10,
State.OVERSHADOWED
);
Assert.assertEquals(
expectedOvershadowedChunks.stream().map(AtomicUpdateGroup::new).collect(Collectors.toList()),
overshadowedGroups
);
overshadowedGroups = manager.findOvershadowedBy(
RootPartitionRange.of(2, 10),
(short) 10,
State.VISIBLE
);
Assert.assertEquals(
Collections.emptyList(),
overshadowedGroups
);
}
@Test
public void testFindOvershadows()
{
PartitionChunk<OvershadowableInteger> chunk = newNonRootChunk(2, 6, 3, 1);
manager.addChunk(chunk);
chunk = newNonRootChunk(6, 8, 3, 1);
manager.addChunk(chunk);
chunk = newNonRootChunk(1, 8, 4, 1);
final PartitionChunk<OvershadowableInteger> visibleChunk = chunk;
manager.addChunk(chunk);
List<AtomicUpdateGroup<OvershadowableInteger>> overshadowingGroups = manager.findOvershadows(
RootPartitionRange.of(1, 3),
(short) 1,
State.OVERSHADOWED
);
Assert.assertEquals(
Collections.emptyList(),
overshadowingGroups
);
overshadowingGroups = manager.findOvershadows(
RootPartitionRange.of(1, 3),
(short) 1,
State.VISIBLE
);
Assert.assertEquals(
ImmutableList.of(new AtomicUpdateGroup<>(visibleChunk)),
overshadowingGroups
);
overshadowingGroups = manager.findOvershadows(
RootPartitionRange.of(4, 7),
(short) 1,
State.OVERSHADOWED
);
Assert.assertEquals(
Collections.emptyList(),
overshadowingGroups
);
overshadowingGroups = manager.findOvershadows(
RootPartitionRange.of(4, 7),
(short) 1,
State.VISIBLE
);
Assert.assertEquals(
ImmutableList.of(new AtomicUpdateGroup<>(visibleChunk)),
overshadowingGroups
);
}
@Test
public void testAddRootChunkToEmptyManager()
{
Assert.assertTrue(manager.isEmpty());
// Add a new one
PartitionChunk<OvershadowableInteger> chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
Assert.assertTrue(manager.isComplete());
// Add a duplicate
Assert.assertFalse(manager.addChunk(chunk));
// Add a new one
chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
Assert.assertTrue(manager.isComplete());
}
@Test
public void testAddNonRootChunkToEmptyManager()
{
Assert.assertTrue(manager.isEmpty());
// Add a new one, atomicUpdateGroup is not full
PartitionChunk<OvershadowableInteger> chunk = newNonRootChunk(10, 12, 1, 3);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
Assert.assertFalse(manager.isComplete());
// Add a new one, atomicUpdateGroup is still not full
chunk = newNonRootChunk(10, 12, 1, 3);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
Assert.assertFalse(manager.isComplete());
// Add a new one, now atomicUpdateGroup is full
chunk = newNonRootChunk(10, 12, 1, 3);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
Assert.assertTrue(manager.isComplete());
// Add a new one to the full group
expectedException.expect(IllegalStateException.class);
expectedException.expectMessage("Can't add chunk");
chunk = newNonRootChunk(10, 12, 1, 3);
addVisibleToManager(chunk);
}
@Test
public void testRemoveFromEmptyManager()
{
Assert.assertTrue(manager.isEmpty());
PartitionChunk<OvershadowableInteger> chunk = newRootChunk();
Assert.assertNull(manager.removeChunk(chunk));
}
@Test
public void testAddOvershadowedChunkToCompletePartition()
{
// Start with a non-root incomplete partitionChunk
PartitionChunk<OvershadowableInteger> chunk = newNonRootChunk(0, 3, 1, 2);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add a visible root chunk, now this group is complete
chunk = newNonRootChunk(0, 3, 1, 2);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add an overshadowed chunk
nextRootPartitionId = 1;
chunk = newRootChunk();
Assert.assertTrue(manager.addChunk(chunk));
expectedOvershadowedChunks.add(chunk);
assertManagerState();
}
@Test
public void testAddOvershadowedChunkToIncompletePartition()
{
// Start with a non-root partitionChunk. This group is incomplete.
PartitionChunk<OvershadowableInteger> chunk = newNonRootChunk(0, 3, 1, 2);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add an overshadowed chunk
nextRootPartitionId = 1;
chunk = newRootChunk();
expectedOvershadowedChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
assertManagerState();
}
@Test
public void testAddStandbyChunksToCompletePartition()
{
// Add complete chunks
PartitionChunk<OvershadowableInteger> chunk;
for (int i = 0; i < 3; i++) {
chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
}
// Add a chunk of an incomplete group
chunk = newNonRootChunk(0, 3, 1, 2);
expectedStandbyChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
assertManagerState();
// This group is now full
chunk = newNonRootChunk(0, 3, 1, 2);
expectedOvershadowedChunks.addAll(expectedVisibleChunks);
expectedVisibleChunks.clear();
expectedVisibleChunks.addAll(expectedStandbyChunks);
expectedStandbyChunks.clear();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
}
@Test
public void testAddStandbyChunksToIncompletePartition()
{
// Add a chunk of an incomplete group
PartitionChunk<OvershadowableInteger> chunk = newNonRootChunk(0, 3, 1, 2);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add chunks of an incomplete group overshadowing the previous one
chunk = newNonRootChunk(0, 3, 2, 3);
expectedOvershadowedChunks.add(expectedVisibleChunks.remove(0));
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
chunk = newNonRootChunk(0, 3, 2, 3);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
}
@Test
public void testRemoveUnknownChunk()
{
PartitionChunk<OvershadowableInteger> chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
chunk = newRootChunk();
Assert.assertNull(manager.removeChunk(chunk));
assertManagerState();
}
@Test
public void testRemoveChunksUntilEmpty()
{
PartitionChunk<OvershadowableInteger> chunk;
for (int i = 0; i < 10; i++) {
chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
}
while (expectedVisibleChunks.size() > 0) {
chunk = expectedVisibleChunks.remove(ThreadLocalRandom.current().nextInt(expectedVisibleChunks.size()));
Assert.assertEquals(chunk, manager.removeChunk(chunk));
assertManagerState();
}
Assert.assertTrue(manager.isEmpty());
}
@Test
public void testRemoveStandbyChunk()
{
// Add complete groups
PartitionChunk<OvershadowableInteger> chunk;
for (int i = 0; i < 3; i++) {
chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
}
// Add two chunks of an incomplete group overshadowing the previous one
chunk = newNonRootChunk(0, 3, 1, 3);
expectedStandbyChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
assertManagerState();
chunk = newNonRootChunk(0, 3, 1, 3);
expectedStandbyChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
assertManagerState();
// Remove one standby chunk
chunk = expectedStandbyChunks.remove(0);
Assert.assertEquals(chunk, manager.removeChunk(chunk));
assertManagerState();
}
@Test
public void testRemoveVisibleChunkAndFallBackToStandby()
{
// Add two complete groups
PartitionChunk<OvershadowableInteger> chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add two chunks of an incomplete group
chunk = newNonRootChunk(0, 2, 1, 3);
expectedStandbyChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
assertManagerState();
chunk = newNonRootChunk(0, 2, 1, 3);
expectedStandbyChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
assertManagerState();
// Remove a chunk of the incomplete group
chunk = expectedVisibleChunks.remove(0);
Assert.assertEquals(chunk, manager.removeChunk(chunk));
expectedOvershadowedChunks.addAll(expectedVisibleChunks);
expectedVisibleChunks.clear();
expectedVisibleChunks.addAll(expectedStandbyChunks);
expectedStandbyChunks.clear();
assertManagerState();
}
@Test
public void testAddCompleteOvershadowedToInCompletePartition()
{
// Add an incomplete group
PartitionChunk<OvershadowableInteger> chunk = newNonRootChunk(0, 2, 1, 3);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
chunk = newNonRootChunk(0, 2, 1, 3);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add a complete overshadowed group
chunk = newRootChunk();
expectedOvershadowedChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
assertManagerState();
chunk = newRootChunk();
expectedStandbyChunks.addAll(expectedVisibleChunks);
expectedVisibleChunks.clear();
expectedVisibleChunks.add(expectedOvershadowedChunks.remove(0));
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
}
@Test
public void testAddCompleteOvershadowedToCompletePartition()
{
// Add a complete group
PartitionChunk<OvershadowableInteger> chunk = newNonRootChunk(0, 2, 1, 2);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
chunk = newNonRootChunk(0, 2, 1, 2);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add a complete overshadowed group
chunk = newRootChunk();
expectedOvershadowedChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
assertManagerState();
chunk = newRootChunk();
expectedOvershadowedChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
assertManagerState();
}
@Test
public void testRemoveChunkFromOvershadowd()
{
// Add a complete group
nextRootPartitionId = 1;
PartitionChunk<OvershadowableInteger> chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add an incomplete group of a larger partition range
chunk = newNonRootChunk(0, 2, 1, 2);
expectedOvershadowedChunks.add(expectedVisibleChunks.remove(0));
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Remove an overshadowed chunk
chunk = expectedOvershadowedChunks.remove(0);
Assert.assertEquals(chunk, manager.removeChunk(chunk));
assertManagerState();
}
@Test
public void testRemoveChunkFromCompleteParition()
{
// Add a complete group
nextRootPartitionId = 1;
PartitionChunk<OvershadowableInteger> chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add a complete group overshadowing the previous
chunk = newNonRootChunk(0, 2, 1, 2);
expectedOvershadowedChunks.add(expectedVisibleChunks.remove(0));
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
chunk = newNonRootChunk(0, 2, 1, 2);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Remove a chunk from the visible group
chunk = expectedVisibleChunks.remove(0);
Assert.assertEquals(chunk, manager.removeChunk(chunk));
assertManagerState();
// Remove another chunk from the visible group. Now the overshadowed group should be visible.
chunk = expectedVisibleChunks.remove(0);
expectedVisibleChunks.addAll(expectedOvershadowedChunks);
expectedOvershadowedChunks.clear();
Assert.assertEquals(chunk, manager.removeChunk(chunk));
assertManagerState();
}
@Test
public void testRemoveChunkFromCompletePartitionFallBackToOvershadowed()
{
// Add complete groups
PartitionChunk<OvershadowableInteger> chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add a complete group overshadowing the previous
chunk = newNonRootChunk(0, 2, 1, 2);
expectedStandbyChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
assertManagerState();
chunk = newNonRootChunk(0, 2, 1, 2);
expectedOvershadowedChunks.addAll(expectedVisibleChunks);
expectedVisibleChunks.clear();
expectedVisibleChunks.add(expectedStandbyChunks.remove(0));
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Remove a visible chunk. Should fall back to the complete overshadowed group.
chunk = expectedVisibleChunks.remove(0);
expectedStandbyChunks.addAll(expectedVisibleChunks);
expectedVisibleChunks.clear();
expectedVisibleChunks.addAll(expectedOvershadowedChunks);
expectedOvershadowedChunks.clear();
Assert.assertEquals(chunk, manager.removeChunk(chunk));
assertManagerState();
}
@Test
public void testAddCompleteOvershadowedToCompletePartition2()
{
// Add overshadowed incomplete groups
List<PartitionChunk<OvershadowableInteger>> chunks = newNonRootChunks(2, 0, 2, 1, 3);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(2, 2, 5, 1, 3);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(2, 5, 8, 1, 3);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(2, 8, 10, 1, 3);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(2, 0, 5, 2, 3);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(2, 0, 8, 3, 3);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
// Add a visible complete group
chunks = newNonRootChunks(2, 0, 10, 4, 2);
expectedVisibleChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
// Add a standby incomplete group
chunks = newNonRootChunks(1, 0, 10, 5, 2);
expectedStandbyChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
assertManagerState();
// Add a chunk to complete the second overshadowed group
chunks = newNonRootChunks(1, 0, 5, 2, 3);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(1, 5, 8, 1, 3);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(1, 8, 10, 1, 3);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
assertManagerState();
// Remove a chunk from the visible group
PartitionChunk<OvershadowableInteger> chunkToRemove = expectedVisibleChunks.remove(0);
expectedStandbyChunks.addAll(expectedVisibleChunks);
expectedVisibleChunks.clear();
Iterator<PartitionChunk<OvershadowableInteger>> iterator = expectedOvershadowedChunks.iterator();
while (iterator.hasNext()) {
final PartitionChunk<OvershadowableInteger> chunk = iterator.next();
if (chunk.getObject().getStartRootPartitionId() == 0 && chunk.getObject().getMinorVersion() == 2
|| chunk.getObject().getStartRootPartitionId() == 5 && chunk.getObject().getMinorVersion() == 1
|| chunk.getObject().getStartRootPartitionId() == 8 && chunk.getObject().getMinorVersion() == 1) {
expectedVisibleChunks.add(chunk);
iterator.remove();
} else if (chunk.getObject().getStartRootPartitionId() == 0 && chunk.getObject().getMinorVersion() > 2
|| chunk.getObject().getStartRootPartitionId() == 5 && chunk.getObject().getMinorVersion() > 1
|| chunk.getObject().getStartRootPartitionId() == 8 && chunk.getObject().getMinorVersion() > 1) {
expectedStandbyChunks.add(chunk);
iterator.remove();
}
}
Assert.assertEquals(chunkToRemove, manager.removeChunk(chunkToRemove));
assertManagerState();
}
@Test
public void testAddCompleteStandbyToCompletePartition()
{
// Add overshadowed groups
List<PartitionChunk<OvershadowableInteger>> chunks = newNonRootChunks(2, 0, 2, 1, 3);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(2, 2, 5, 1, 3);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(2, 5, 8, 1, 3);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(2, 8, 10, 1, 2);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
// Visible group for [0, 5)
chunks = newNonRootChunks(2, 0, 5, 2, 2);
expectedVisibleChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
// Visible group for [5, 10)
chunks = newNonRootChunks(2, 5, 10, 2, 2);
expectedVisibleChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
// Standby groups
chunks = newNonRootChunks(2, 0, 5, 3, 3);
expectedStandbyChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(2, 5, 10, 3, 3);
expectedStandbyChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(2, 0, 5, 4, 3);
expectedStandbyChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(2, 0, 10, 5, 3);
expectedStandbyChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
assertManagerState();
// Add a chunk to complete the second standby group
expectedOvershadowedChunks.addAll(expectedVisibleChunks);
expectedVisibleChunks.clear();
chunks = newNonRootChunks(1, 0, 5, 4, 3);
chunks.forEach(this::addVisibleToManager);
chunks = newNonRootChunks(1, 5, 10, 3, 3);
chunks.forEach(this::addVisibleToManager);
Iterator<PartitionChunk<OvershadowableInteger>> iterator = expectedStandbyChunks.iterator();
while (iterator.hasNext()) {
final PartitionChunk<OvershadowableInteger> chunk = iterator.next();
if (chunk.getObject().getStartRootPartitionId() == 0 && chunk.getObject().getMinorVersion() == 4
|| chunk.getObject().getStartRootPartitionId() == 5 && chunk.getObject().getMinorVersion() == 3) {
expectedVisibleChunks.add(chunk);
iterator.remove();
} else if (chunk.getObject().getStartRootPartitionId() == 0 && chunk.getObject().getMinorVersion() < 4
|| chunk.getObject().getStartRootPartitionId() == 5 && chunk.getObject().getMinorVersion() < 3) {
expectedOvershadowedChunks.add(chunk);
iterator.remove();
}
}
assertManagerState();
}
@Test
public void testFallBackToStandby2()
{
// Add an overshadowed incomplete group
List<PartitionChunk<OvershadowableInteger>> chunks = newNonRootChunks(2, 0, 2, 1, 3);
expectedOvershadowedChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
// Add a visible complete group
chunks = newNonRootChunks(2, 0, 2, 2, 2);
expectedVisibleChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
// Add three standby incomplete groups
chunks = newNonRootChunks(2, 0, 2, 3, 3);
expectedStandbyChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(2, 0, 2, 4, 3);
expectedStandbyChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
chunks = newNonRootChunks(2, 0, 2, 5, 3);
expectedStandbyChunks.addAll(chunks);
chunks.forEach(manager::addChunk);
assertManagerState();
// Remove a visible chunk. The latest standby group should be visible.
PartitionChunk<OvershadowableInteger> chunkToRemove = expectedVisibleChunks.remove(0);
expectedOvershadowedChunks.addAll(expectedVisibleChunks);
expectedVisibleChunks.clear();
Iterator<PartitionChunk<OvershadowableInteger>> iterator = expectedStandbyChunks.iterator();
while (iterator.hasNext()) {
final PartitionChunk<OvershadowableInteger> chunk = iterator.next();
if (chunk.getObject().getMinorVersion() == 5) {
expectedVisibleChunks.add(chunk);
iterator.remove();
} else {
expectedOvershadowedChunks.add(chunk);
iterator.remove();
}
}
Assert.assertEquals(chunkToRemove, manager.removeChunk(chunkToRemove));
assertManagerState();
}
@Test
public void testAddAndOverwriteAndAdd()
{
// Start with root partitionChunks
for (int i = 0; i < 5; i++) {
PartitionChunk<OvershadowableInteger> chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
}
assertManagerState();
// Overwrite some partitionChunks with a higher minor version
final int rootStartPartitionIdToOverwrite = expectedVisibleChunks.get(1).getChunkNumber();
final int rootEndPartitionIdToOverwrite = expectedVisibleChunks.get(3).getChunkNumber();
for (int i = 0; i < 2; i++) {
PartitionChunk<OvershadowableInteger> chunk = newNonRootChunk(
rootStartPartitionIdToOverwrite,
rootEndPartitionIdToOverwrite,
3,
2
);
Assert.assertTrue(manager.addChunk(chunk));
if (i == 0) {
expectedStandbyChunks.add(chunk);
}
if (i == 1) {
expectedOvershadowedChunks.addAll(expectedVisibleChunks.subList(1, 3));
expectedVisibleChunks.subList(1, 3).clear();
expectedVisibleChunks.addAll(expectedStandbyChunks);
expectedVisibleChunks.add(chunk);
expectedStandbyChunks.clear();
}
assertManagerState();
}
// Append new visible chunks
for (int i = 0; i < 3; i++) {
PartitionChunk<OvershadowableInteger> chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
}
assertManagerState();
// Append complete overshadowed chunks
for (int i = 0; i < 2; i++) {
PartitionChunk<OvershadowableInteger> chunk = newNonRootChunk(
rootStartPartitionIdToOverwrite,
rootEndPartitionIdToOverwrite,
2,
2
);
expectedOvershadowedChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
assertManagerState();
}
}
@Test
public void testRemoveOvershadowed()
{
// Start with root partitionChunks
for (int i = 0; i < 5; i++) {
PartitionChunk<OvershadowableInteger> chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
}
// Overwrite some partitionChunks with a higher minor version
final int rootStartPartitionIdToOverwrite = expectedVisibleChunks.get(1).getChunkNumber();
final int rootEndPartitionIdToOverwrite = expectedVisibleChunks.get(3).getChunkNumber();
for (int i = 0; i < 2; i++) {
PartitionChunk<OvershadowableInteger> chunk = newNonRootChunk(
rootStartPartitionIdToOverwrite,
rootEndPartitionIdToOverwrite,
1,
2
);
Assert.assertTrue(addVisibleToManager(chunk));
}
expectedOvershadowedChunks.addAll(expectedVisibleChunks.subList(1, 3));
IntStream.range(0, 2).forEach(i -> expectedVisibleChunks.remove(1));
assertManagerState();
// Remove an overshadowed chunk
PartitionChunk<OvershadowableInteger> chunk = expectedOvershadowedChunks.remove(0);
Assert.assertEquals(chunk, manager.removeChunk(chunk));
assertManagerState();
// Remove a chunk overshadows others
for (PartitionChunk<OvershadowableInteger> visibleChunk : expectedVisibleChunks) {
if (visibleChunk.getChunkNumber() >= PartitionIds.NON_ROOT_GEN_START_PARTITION_ID) {
Assert.assertEquals(visibleChunk, removeVisibleFromManager(visibleChunk));
break;
}
}
assertManagerState();
}
@Test
public void testRemoveOvershadowingVisible()
{
// Start with root partitionChunks
for (int i = 0; i < 5; i++) {
PartitionChunk<OvershadowableInteger> chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
}
// Overwrite some partitionChunks with a higher minor version
final int rootStartPartitionIdToOverwrite = expectedVisibleChunks.get(1).getChunkNumber();
final int rootEndPartitionIdToOverwrite = expectedVisibleChunks.get(3).getChunkNumber();
for (int i = 0; i < 2; i++) {
PartitionChunk<OvershadowableInteger> chunk = newNonRootChunk(
rootStartPartitionIdToOverwrite,
rootEndPartitionIdToOverwrite,
1,
2
);
Assert.assertTrue(addVisibleToManager(chunk));
}
expectedOvershadowedChunks.addAll(expectedVisibleChunks.subList(1, 3));
IntStream.range(0, 2).forEach(i -> expectedVisibleChunks.remove(1));
assertManagerState();
// Remove a chunk overshadows others
boolean removed = false;
final Iterator<PartitionChunk<OvershadowableInteger>> iterator = expectedVisibleChunks.iterator();
while (iterator.hasNext()) {
final PartitionChunk<OvershadowableInteger> visibleChunk = iterator.next();
if (visibleChunk.getChunkNumber() >= PartitionIds.NON_ROOT_GEN_START_PARTITION_ID) {
iterator.remove();
if (!removed) {
manager.removeChunk(visibleChunk);
removed = true;
} else {
expectedStandbyChunks.add(visibleChunk);
}
}
}
expectedVisibleChunks.addAll(expectedOvershadowedChunks);
expectedOvershadowedChunks.clear();
assertManagerState();
}
@Test
public void testFallBackToStandbyOnRemove()
{
PartitionChunk<OvershadowableInteger> chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add a chunk of an incomplete atomicUpdateGroup
chunk = newNonRootChunk(0, 1, 1, 3);
expectedStandbyChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
assertManagerState();
// Add a chunk of an incomplete atomicUpdateGroup which overshadows the previous one
chunk = newNonRootChunk(0, 1, 2, 2);
expectedStandbyChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
assertManagerState();
// Remove the visible chunk
chunk = expectedVisibleChunks.remove(0);
expectedVisibleChunks.add(expectedStandbyChunks.remove(1));
expectedOvershadowedChunks.add(expectedStandbyChunks.remove(0));
Assert.assertEquals(chunk, manager.removeChunk(chunk));
assertManagerState();
}
@Test
public void testFallBackToOvershadowedOnRemove()
{
PartitionChunk<OvershadowableInteger> chunk;
// Add incomplete non-root group
for (int i = 0; i < 2; i++) {
chunk = newNonRootChunk(10, 20, 5, 3);
Assert.assertTrue(addVisibleToManager(chunk));
}
assertManagerState();
// Add incomplete non-root group overshadowed by the previous one
for (int i = 0; i < 2; i++) {
chunk = newNonRootChunk(10, 20, 4, 3);
expectedOvershadowedChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
chunk = newNonRootChunk(10, 20, 3, 3);
expectedOvershadowedChunks.add(chunk);
Assert.assertTrue(manager.addChunk(chunk));
}
assertManagerState();
// Remove the visible group one by one
chunk = expectedVisibleChunks.remove(0);
Assert.assertEquals(chunk, manager.removeChunk(chunk));
assertManagerState();
chunk = expectedVisibleChunks.remove(0);
expectedOvershadowedChunks
.stream()
.filter(c -> c.getObject().getMinorVersion() == 4)
.forEach(c -> expectedVisibleChunks.add(c));
expectedOvershadowedChunks.removeAll(expectedVisibleChunks);
Assert.assertEquals(chunk, manager.removeChunk(chunk));
assertManagerState();
}
@Test
public void testAddIncompleteAtomicUpdateGroups()
{
// Add an incomplete chunk
PartitionChunk<OvershadowableInteger> chunk = newNonRootChunk(0, 1, 1, 3);
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add an incomplete chunk overshadowing the previous one. The atomicUpdateGroup of this chunk
// will be complete later in this test.
chunk = newNonRootChunk(0, 1, 2, 2);
expectedOvershadowedChunks.add(expectedVisibleChunks.remove(0));
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add an incomplete chunk overshadowing the previous one
chunk = newNonRootChunk(0, 1, 3, 5);
expectedOvershadowedChunks.add(expectedVisibleChunks.remove(0));
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add a chunk to complete the second atomicUpdateGroup overshadowed by the previous one
chunk = newNonRootChunk(0, 1, 2, 2);
expectedStandbyChunks.add(expectedVisibleChunks.remove(0));
expectedVisibleChunks.add(expectedOvershadowedChunks.remove(expectedOvershadowedChunks.size() - 1));
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
}
@Test
public void testMissingStartRootPartitionId()
{
// Simulate the first two chunks are missing at the root level
nextRootPartitionId = 2;
PartitionChunk<OvershadowableInteger> chunk = newRootChunk();
Assert.assertTrue(addVisibleToManager(chunk));
assertManagerState();
// Add a new group overshadows the previous one
expectedOvershadowedChunks.addAll(expectedVisibleChunks);
expectedVisibleChunks.clear();
for (int i = 0; i < 2; i++) {
chunk = newNonRootChunk(0, 3, 1, 2);
Assert.assertTrue(addVisibleToManager(chunk));
}
assertManagerState();
// Remove the visible group
for (int i = 0; i < 2; i++) {
chunk = expectedVisibleChunks.remove(0);
Assert.assertEquals(chunk, manager.removeChunk(chunk));
}
expectedVisibleChunks.addAll(expectedOvershadowedChunks);
expectedOvershadowedChunks.clear();
assertManagerState();
}
private boolean addVisibleToManager(PartitionChunk<OvershadowableInteger> chunk)
{
expectedVisibleChunks.add(chunk);
return manager.addChunk(chunk);
}
private PartitionChunk<OvershadowableInteger> removeVisibleFromManager(PartitionChunk<OvershadowableInteger> chunk)
{
expectedVisibleChunks.remove(chunk);
return manager.removeChunk(chunk);
}
private void assertManagerState()
{
Assert.assertEquals(
"Mismatched visible chunks",
new HashSet<>(expectedVisibleChunks),
Sets.newHashSet(manager.visibleChunksIterator())
);
Assert.assertEquals(
"Mismatched overshadowed chunks",
new HashSet<>(expectedOvershadowedChunks),
new HashSet<>(manager.getOvershadowedChunks())
);
Assert.assertEquals(
"Mismatched standby chunks",
new HashSet<>(expectedStandbyChunks),
new HashSet<>(manager.getStandbyChunks())
);
}
/**
 * Creates {@code n} non-root chunks that all share the same root partition range,
 * minor version, and atomic update group size (each still gets a fresh partition id).
 */
private List<PartitionChunk<OvershadowableInteger>> newNonRootChunks(
    int n,
    int startPartitionId,
    int endPartitionId,
    int minorVersion,
    int atomicUpdateGroupSize
)
{
  return IntStream
      .rangeClosed(1, n)
      .mapToObj(ignored -> newNonRootChunk(startPartitionId, endPartitionId, minorVersion, atomicUpdateGroupSize))
      .collect(Collectors.toList());
}
/**
 * Creates a first-generation (root) chunk with the next available root partition id.
 */
private NumberedPartitionChunk<OvershadowableInteger> newRootChunk()
{
  final int id = nextRootPartitionId();
  final OvershadowableInteger payload = new OvershadowableInteger(MAJOR_VERSION, id, 0);
  return new NumberedPartitionChunk<>(id, 0, payload);
}
/**
 * Creates an overwriting (non-root) chunk covering the root partition id range
 * {@code [startRootPartitionId, endRootPartitionId)} at the given minor version,
 * belonging to an atomic update group of the given size.
 */
private NumberedOverwritingPartitionChunk<OvershadowableInteger> newNonRootChunk(
    int startRootPartitionId,
    int endRootPartitionId,
    int minorVersion,
    int atomicUpdateGroupSize
)
{
  final int id = nextNonRootPartitionId();
  final OvershadowableInteger payload = new OvershadowableInteger(
      MAJOR_VERSION,
      id,
      0,
      startRootPartitionId,
      endRootPartitionId,
      minorVersion,
      atomicUpdateGroupSize
  );
  return new NumberedOverwritingPartitionChunk<>(id, payload);
}
/** Returns the current root partition id and advances the counter. */
private int nextRootPartitionId()
{
  final int current = nextRootPartitionId;
  nextRootPartitionId = current + 1;
  return current;
}
/** Returns the current non-root partition id and advances the counter. */
private int nextNonRootPartitionId()
{
  final int current = nextNonRootPartitionId;
  nextNonRootPartitionId = current + 1;
  return current;
}
}
|
apache/fluss | 38,403 | fluss-lake/fluss-lake-paimon/src/test/java/org/apache/fluss/lake/paimon/tiering/PaimonTieringTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.fluss.lake.paimon.tiering;
import org.apache.fluss.config.ConfigOptions;
import org.apache.fluss.config.Configuration;
import org.apache.fluss.lake.committer.CommittedLakeSnapshot;
import org.apache.fluss.lake.committer.LakeCommitter;
import org.apache.fluss.lake.serializer.SimpleVersionedSerializer;
import org.apache.fluss.lake.writer.LakeWriter;
import org.apache.fluss.lake.writer.WriterInitContext;
import org.apache.fluss.metadata.TableBucket;
import org.apache.fluss.metadata.TableDescriptor;
import org.apache.fluss.metadata.TableInfo;
import org.apache.fluss.metadata.TablePath;
import org.apache.fluss.record.ChangeType;
import org.apache.fluss.record.GenericRecord;
import org.apache.fluss.record.LogRecord;
import org.apache.fluss.row.BinaryString;
import org.apache.fluss.row.GenericRow;
import org.apache.fluss.utils.types.Tuple2;
import org.apache.paimon.CoreOptions;
import org.apache.paimon.catalog.Catalog;
import org.apache.paimon.catalog.CatalogContext;
import org.apache.paimon.catalog.CatalogFactory;
import org.apache.paimon.catalog.Identifier;
import org.apache.paimon.data.InternalRow;
import org.apache.paimon.options.Options;
import org.apache.paimon.schema.Schema;
import org.apache.paimon.table.FileStoreTable;
import org.apache.paimon.table.source.DataSplit;
import org.apache.paimon.table.source.ReadBuilder;
import org.apache.paimon.table.source.Split;
import org.apache.paimon.types.DataTypes;
import org.apache.paimon.utils.CloseableIterator;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import static org.apache.fluss.flink.tiering.committer.TieringCommitOperator.toBucketOffsetsProperty;
import static org.apache.fluss.lake.committer.BucketOffset.FLUSS_LAKE_SNAP_BUCKET_OFFSET_PROPERTY;
import static org.apache.fluss.lake.paimon.utils.PaimonConversions.toPaimon;
import static org.apache.fluss.metadata.TableDescriptor.BUCKET_COLUMN_NAME;
import static org.apache.fluss.metadata.TableDescriptor.OFFSET_COLUMN_NAME;
import static org.apache.fluss.metadata.TableDescriptor.TIMESTAMP_COLUMN_NAME;
import static org.apache.fluss.record.ChangeType.DELETE;
import static org.apache.fluss.record.ChangeType.INSERT;
import static org.apache.fluss.record.ChangeType.UPDATE_AFTER;
import static org.apache.fluss.record.ChangeType.UPDATE_BEFORE;
import static org.assertj.core.api.Assertions.assertThat;
/** The UT for tiering to Paimon via {@link PaimonLakeTieringFactory}. */
class PaimonTieringTest {
private @TempDir File tempWarehouseDir;
private PaimonLakeTieringFactory paimonLakeTieringFactory;
private Catalog paimonCatalog;
@BeforeEach
void beforeEach() {
    // Point both the tiering factory under test and the verification catalog at the
    // same temporary warehouse directory so reads observe what the factory wrote.
    Configuration flussConf = new Configuration();
    flussConf.setString("warehouse", tempWarehouseDir.toString());
    paimonLakeTieringFactory = new PaimonLakeTieringFactory(flussConf);
    Options paimonOptions = Options.fromMap(flussConf.toMap());
    paimonCatalog = CatalogFactory.createCatalog(CatalogContext.create(paimonOptions));
}
/**
 * Cartesian product of {primary-key, log} x {partitioned, non-partitioned},
 * in the order (true,true), (true,false), (false,true), (false,false).
 */
private static Stream<Arguments> tieringWriteArgs() {
    return Stream.of(true, false)
            .flatMap(
                    isPrimaryKey ->
                            Stream.of(true, false)
                                    .map(isPartitioned -> Arguments.of(isPrimaryKey, isPartitioned)));
}
/**
 * End-to-end tiering round trip for all four combinations of primary-key/log table and
 * partitioned/non-partitioned: writes records per bucket (and per partition), commits them
 * through the lake committer, reads them back from Paimon, and finally verifies
 * {@code getMissingLakeSnapshot} reports the committed offsets.
 *
 * <p>Fix: the verification loops previously hard-coded {@code 3} as the bucket count,
 * silently decoupling them from {@code bucketNum}; they now iterate up to {@code bucketNum}.
 */
@ParameterizedTest
@MethodSource("tieringWriteArgs")
void testTieringWriteTable(boolean isPrimaryKeyTable, boolean isPartitioned) throws Exception {
    int bucketNum = 3;
    TablePath tablePath =
            TablePath.of(
                    "paimon",
                    String.format(
                            "test_tiering_table_%s_%s",
                            isPrimaryKeyTable ? "primary_key" : "log",
                            isPartitioned ? "partitioned" : "non_partitioned"));
    createTable(
            tablePath, isPrimaryKeyTable, isPartitioned, isPrimaryKeyTable ? bucketNum : null);
    TableDescriptor descriptor =
            TableDescriptor.builder()
                    .schema(
                            org.apache.fluss.metadata.Schema.newBuilder()
                                    .column("c1", org.apache.fluss.types.DataTypes.INT())
                                    .column("c2", org.apache.fluss.types.DataTypes.STRING())
                                    .column("c3", org.apache.fluss.types.DataTypes.STRING())
                                    .build())
                    .distributedBy(bucketNum)
                    .property(ConfigOptions.TABLE_DATALAKE_ENABLED, true)
                    .build();
    TableInfo tableInfo = TableInfo.of(tablePath, 0, 1, descriptor, 1L, 1L);
    List<PaimonWriteResult> paimonWriteResults = new ArrayList<>();
    SimpleVersionedSerializer<PaimonWriteResult> writeResultSerializer =
            paimonLakeTieringFactory.getWriteResultSerializer();
    SimpleVersionedSerializer<PaimonCommittable> committableSerializer =
            paimonLakeTieringFactory.getCommittableSerializer();
    try (LakeCommitter<PaimonWriteResult, PaimonCommittable> lakeCommitter =
            createLakeCommitter(tablePath)) {
        // should no any missing snapshot before anything is committed
        assertThat(lakeCommitter.getMissingLakeSnapshot(1L)).isNull();
    }
    Map<Tuple2<String, Integer>, List<LogRecord>> recordsByBucket = new HashMap<>();
    // single (null, null) entry models the non-partitioned case
    Map<Long, String> partitionIdAndName =
            isPartitioned
                    ? new HashMap<Long, String>() {
                        {
                            put(1L, "p1");
                            put(2L, "p2");
                            put(3L, "p3");
                        }
                    }
                    : Collections.singletonMap(null, null);
    Map<TableBucket, Long> tableBucketOffsets = new HashMap<>();
    // first, write data
    for (int bucket = 0; bucket < bucketNum; bucket++) {
        for (Map.Entry<Long, String> entry : partitionIdAndName.entrySet()) {
            String partition = entry.getValue();
            try (LakeWriter<PaimonWriteResult> lakeWriter =
                    createLakeWriter(tablePath, bucket, partition, entry.getKey(), tableInfo)) {
                Tuple2<String, Integer> partitionBucket = Tuple2.of(partition, bucket);
                Tuple2<List<LogRecord>, List<LogRecord>> writeAndExpectRecords =
                        isPrimaryKeyTable
                                ? genPrimaryKeyTableRecords(partition, bucket)
                                : genLogTableRecords(partition, bucket, 10);
                List<LogRecord> writtenRecords = writeAndExpectRecords.f0;
                List<LogRecord> expectRecords = writeAndExpectRecords.f1;
                recordsByBucket.put(partitionBucket, expectRecords);
                tableBucketOffsets.put(new TableBucket(0, entry.getKey(), bucket), 10L);
                for (LogRecord logRecord : writtenRecords) {
                    lakeWriter.write(logRecord);
                }
                // serialize/deserialize writeResult to exercise the serializer round trip
                PaimonWriteResult paimonWriteResult = lakeWriter.complete();
                byte[] serialized = writeResultSerializer.serialize(paimonWriteResult);
                paimonWriteResults.add(
                        writeResultSerializer.deserialize(
                                writeResultSerializer.getVersion(), serialized));
            }
        }
    }
    // second, commit data
    try (LakeCommitter<PaimonWriteResult, PaimonCommittable> lakeCommitter =
            createLakeCommitter(tablePath)) {
        // serialize/deserialize committable to exercise the serializer round trip
        PaimonCommittable paimonCommittable = lakeCommitter.toCommittable(paimonWriteResults);
        byte[] serialized = committableSerializer.serialize(paimonCommittable);
        paimonCommittable =
                committableSerializer.deserialize(
                        committableSerializer.getVersion(), serialized);
        long snapshot =
                lakeCommitter.commit(
                        paimonCommittable,
                        toBucketOffsetsProperty(
                                tableBucketOffsets,
                                partitionIdAndName,
                                getPartitionKeys(tablePath)));
        assertThat(snapshot).isEqualTo(1);
    }
    // then, check data
    for (int bucket = 0; bucket < bucketNum; bucket++) {
        for (String partition : partitionIdAndName.values()) {
            Tuple2<String, Integer> partitionBucket = Tuple2.of(partition, bucket);
            List<LogRecord> expectRecords = recordsByBucket.get(partitionBucket);
            CloseableIterator<InternalRow> actualRecords =
                    getPaimonRows(tablePath, partition, isPrimaryKeyTable, bucket);
            verifyTableRecords(actualRecords, expectRecords, bucket, partition);
        }
    }
    // then, let's verify getMissingLakeSnapshot works
    try (LakeCommitter<PaimonWriteResult, PaimonCommittable> lakeCommitter =
            createLakeCommitter(tablePath)) {
        // use snapshot id 0 as the known snapshot id
        CommittedLakeSnapshot committedLakeSnapshot = lakeCommitter.getMissingLakeSnapshot(0L);
        assertThat(committedLakeSnapshot).isNotNull();
        Map<Tuple2<Long, Integer>, Long> offsets = committedLakeSnapshot.getLogEndOffsets();
        for (int bucket = 0; bucket < bucketNum; bucket++) {
            for (Long partitionId : partitionIdAndName.keySet()) {
                // we only write 10 records, so expected log offset should be 10
                assertThat(offsets.get(Tuple2.of(partitionId, bucket))).isEqualTo(10);
            }
        }
        assertThat(committedLakeSnapshot.getLakeSnapshotId()).isOne();
        // use null as the known snapshot id
        CommittedLakeSnapshot committedLakeSnapshot2 =
                lakeCommitter.getMissingLakeSnapshot(null);
        assertThat(committedLakeSnapshot2).isEqualTo(committedLakeSnapshot);
        // use snapshot id 1 as the known snapshot id
        committedLakeSnapshot = lakeCommitter.getMissingLakeSnapshot(1L);
        // no any missing committed offset since the latest snapshot is 1L
        assertThat(committedLakeSnapshot).isNull();
    }
}
/**
 * Tiering round trip for a table partitioned by two keys (region + year): writes one
 * bucket per partition, commits, then reads each partition back via a two-key
 * partition filter and verifies records and system columns.
 */
@Test
void testMultiPartitionTiering() throws Exception {
    // Test multiple partitions: region + year
    TablePath tablePath = TablePath.of("paimon", "test_multi_partition");
    createMultiPartitionTable(tablePath);
    TableDescriptor descriptor =
            TableDescriptor.builder()
                    .schema(
                            org.apache.fluss.metadata.Schema.newBuilder()
                                    .column("c1", org.apache.fluss.types.DataTypes.INT())
                                    .column("c2", org.apache.fluss.types.DataTypes.STRING())
                                    .column("region", org.apache.fluss.types.DataTypes.STRING())
                                    .column("year", org.apache.fluss.types.DataTypes.STRING())
                                    .build())
                    .partitionedBy("region", "year")
                    .distributedBy(1)
                    .property(ConfigOptions.TABLE_DATALAKE_ENABLED, true)
                    .build();
    TableInfo tableInfo = TableInfo.of(tablePath, 0, 1, descriptor, 1L, 1L);
    Map<String, List<LogRecord>> recordsByPartition = new HashMap<>();
    List<PaimonWriteResult> paimonWriteResults = new ArrayList<>();
    Map<TableBucket, Long> tableBucketOffsets = new HashMap<>();
    // Test data for different partitions using $ separator (e.g. "us-east$2024")
    Map<Long, String> partitionIdAndName =
            new HashMap<Long, String>() {
                {
                    put(1L, "us-east$2024");
                    put(2L, "us-west$2024");
                    put(3L, "eu-central$2023");
                }
            };
    int bucket = 0;
    // Write 3 records into bucket 0 of each partition
    for (Map.Entry<Long, String> entry : partitionIdAndName.entrySet()) {
        String partition = entry.getValue();
        try (LakeWriter<PaimonWriteResult> lakeWriter =
                createLakeWriter(tablePath, bucket, partition, entry.getKey(), tableInfo)) {
            List<LogRecord> logRecords =
                    genLogTableRecordsForMultiPartition(partition, bucket, 3);
            recordsByPartition.put(partition, logRecords);
            for (LogRecord logRecord : logRecords) {
                lakeWriter.write(logRecord);
            }
            tableBucketOffsets.put(new TableBucket(0, entry.getKey(), bucket), 3L);
            PaimonWriteResult result = lakeWriter.complete();
            paimonWriteResults.add(result);
        }
    }
    // Commit all data
    try (LakeCommitter<PaimonWriteResult, PaimonCommittable> lakeCommitter =
            createLakeCommitter(tablePath)) {
        PaimonCommittable committable = lakeCommitter.toCommittable(paimonWriteResults);
        long snapshot =
                lakeCommitter.commit(
                        committable,
                        toBucketOffsetsProperty(
                                tableBucketOffsets,
                                partitionIdAndName,
                                getPartitionKeys(tablePath)));
        assertThat(snapshot).isEqualTo(1);
    }
    // Verify data for each partition
    for (String partition : partitionIdAndName.values()) {
        List<LogRecord> expectRecords = recordsByPartition.get(partition);
        CloseableIterator<InternalRow> actualRecords =
                getPaimonRowsMultiPartition(tablePath, partition);
        verifyLogTableRecordsMultiPartition(actualRecords, expectRecords, bucket);
    }
}
/**
 * Tiering round trip for a table partitioned by three keys (region + year + month).
 * Also asserts the exact bucket-offset JSON recorded in the Paimon snapshot property;
 * a LinkedHashMap is used so the serialized entry order is deterministic.
 */
@Test
void testThreePartitionTiering() throws Exception {
    // Test three partitions: region + year + month
    TablePath tablePath = TablePath.of("paimon", "test_three_partition");
    createThreePartitionTable(tablePath);
    TableDescriptor descriptor =
            TableDescriptor.builder()
                    .schema(
                            org.apache.fluss.metadata.Schema.newBuilder()
                                    .column("c1", org.apache.fluss.types.DataTypes.INT())
                                    .column("c2", org.apache.fluss.types.DataTypes.STRING())
                                    .column("region", org.apache.fluss.types.DataTypes.STRING())
                                    .column("year", org.apache.fluss.types.DataTypes.STRING())
                                    .column("month", org.apache.fluss.types.DataTypes.STRING())
                                    .build())
                    .partitionedBy("region", "year", "month")
                    .distributedBy(1)
                    .property(ConfigOptions.TABLE_DATALAKE_ENABLED, true)
                    .build();
    TableInfo tableInfo = TableInfo.of(tablePath, 0, 1, descriptor, 1L, 1L);
    Map<String, List<LogRecord>> recordsByPartition = new HashMap<>();
    List<PaimonWriteResult> paimonWriteResults = new ArrayList<>();
    Map<TableBucket, Long> tableBucketOffsets = new HashMap<>();
    // Test data for different three-level partitions using $ separator;
    // LinkedHashMap keeps insertion order for the offset-property assertion below
    Map<Long, String> partitionIdAndName =
            new LinkedHashMap<Long, String>() {
                {
                    put(1L, "us-east$2024$01");
                    put(2L, "eu-central$2023$12");
                }
            };
    int bucket = 0;
    // Write 2 records into bucket 0 of each partition
    for (Map.Entry<Long, String> entry : partitionIdAndName.entrySet()) {
        String partition = entry.getValue();
        try (LakeWriter<PaimonWriteResult> lakeWriter =
                createLakeWriter(tablePath, bucket, partition, entry.getKey(), tableInfo)) {
            List<LogRecord> logRecords =
                    genLogTableRecordsForMultiPartition(
                            partition, bucket, 2); // Use same method
            recordsByPartition.put(partition, logRecords);
            for (LogRecord logRecord : logRecords) {
                lakeWriter.write(logRecord);
            }
            tableBucketOffsets.put(new TableBucket(0, entry.getKey(), bucket), 2L);
            PaimonWriteResult result = lakeWriter.complete();
            paimonWriteResults.add(result);
        }
    }
    // Commit all data
    long snapshot;
    try (LakeCommitter<PaimonWriteResult, PaimonCommittable> lakeCommitter =
            createLakeCommitter(tablePath)) {
        PaimonCommittable committable = lakeCommitter.toCommittable(paimonWriteResults);
        snapshot =
                lakeCommitter.commit(
                        committable,
                        toBucketOffsetsProperty(
                                tableBucketOffsets,
                                partitionIdAndName,
                                getPartitionKeys(tablePath)));
        assertThat(snapshot).isEqualTo(1);
    }
    // check fluss offsets in paimon snapshot property
    String offsetProperty = getSnapshotLogOffsetProperty(tablePath, snapshot);
    assertThat(offsetProperty)
            .isEqualTo(
                    "[{\"partition_id\":1,\"bucket\":0,\"partition_name\":\"region=us-east/year=2024/month=01\",\"offset\":2},"
                            + "{\"partition_id\":2,\"bucket\":0,\"partition_name\":\"region=eu-central/year=2023/month=12\",\"offset\":2}]");
    // Verify data for each partition
    for (String partition : partitionIdAndName.values()) {
        List<LogRecord> expectRecords = recordsByPartition.get(partition);
        CloseableIterator<InternalRow> actualRecords =
                getPaimonRowsThreePartition(tablePath, partition);
        verifyLogTableRecordsThreePartition(actualRecords, expectRecords, bucket);
    }
}
/**
 * Verifies a two-partition-key table read: business columns (c1, c2), the two partition
 * columns (region at index 2, year at index 3), then the system columns
 * __bucket (4), __offset (5), __timestamp (6). Records must appear in write order.
 */
private void verifyLogTableRecordsMultiPartition(
        CloseableIterator<InternalRow> actualRecords,
        List<LogRecord> expectRecords,
        int expectBucket)
        throws Exception {
    for (LogRecord expectRecord : expectRecords) {
        InternalRow actualRow = actualRecords.next();
        // check business columns:
        assertThat(actualRow.getInt(0)).isEqualTo(expectRecord.getRow().getInt(0));
        assertThat(actualRow.getString(1).toString())
                .isEqualTo(expectRecord.getRow().getString(1).toString());
        // check partition columns (should match record data)
        assertThat(actualRow.getString(2).toString())
                .isEqualTo(expectRecord.getRow().getString(2).toString()); // region
        assertThat(actualRow.getString(3).toString())
                .isEqualTo(expectRecord.getRow().getString(3).toString()); // year
        // check system columns: __bucket, __offset, __timestamp
        assertThat(actualRow.getInt(4)).isEqualTo(expectBucket);
        assertThat(actualRow.getLong(5)).isEqualTo(expectRecord.logOffset());
        assertThat(actualRow.getTimestamp(6, 6).getMillisecond())
                .isEqualTo(expectRecord.timestamp());
    }
    // no extra rows beyond the expected ones
    assertThat(actualRecords.hasNext()).isFalse();
    actualRecords.close();
}
/**
 * Verifies a three-partition-key table read: business columns (c1, c2), partition columns
 * region (2), year (3), month (4), then the system columns __bucket (5), __offset (6),
 * __timestamp (7). Records must appear in write order.
 */
private void verifyLogTableRecordsThreePartition(
        CloseableIterator<InternalRow> actualRecords,
        List<LogRecord> expectRecords,
        int expectBucket)
        throws Exception {
    for (LogRecord expectRecord : expectRecords) {
        InternalRow actualRow = actualRecords.next();
        // check business columns:
        assertThat(actualRow.getInt(0)).isEqualTo(expectRecord.getRow().getInt(0));
        assertThat(actualRow.getString(1).toString())
                .isEqualTo(expectRecord.getRow().getString(1).toString());
        // check partition columns (should match record data)
        assertThat(actualRow.getString(2).toString())
                .isEqualTo(expectRecord.getRow().getString(2).toString()); // region
        assertThat(actualRow.getString(3).toString())
                .isEqualTo(expectRecord.getRow().getString(3).toString()); // year
        assertThat(actualRow.getString(4).toString())
                .isEqualTo(expectRecord.getRow().getString(4).toString()); // month
        // check system columns: __bucket, __offset, __timestamp
        assertThat(actualRow.getInt(5)).isEqualTo(expectBucket);
        assertThat(actualRow.getLong(6)).isEqualTo(expectRecord.logOffset());
        assertThat(actualRow.getTimestamp(7, 6).getMillisecond())
                .isEqualTo(expectRecord.timestamp());
    }
    // no extra rows beyond the expected ones
    assertThat(actualRecords.hasNext()).isFalse();
    actualRecords.close();
}
/**
 * Verifies a single-partition-key (or non-partitioned) table read: business columns
 * c1 (0), c2 (1), c3 (2, also the partition column when partitioned), then the system
 * columns __bucket (3), __offset (4), __timestamp (5). Records must appear in write order.
 *
 * @param partition expected value of column c3, or null for a non-partitioned table
 */
private void verifyTableRecords(
        CloseableIterator<InternalRow> actualRecords,
        List<LogRecord> expectRecords,
        int expectBucket,
        @Nullable String partition)
        throws Exception {
    for (LogRecord expectRecord : expectRecords) {
        InternalRow actualRow = actualRecords.next();
        // check business columns:
        assertThat(actualRow.getInt(0)).isEqualTo(expectRecord.getRow().getInt(0));
        assertThat(actualRow.getString(1).toString())
                .isEqualTo(expectRecord.getRow().getString(1).toString());
        assertThat(actualRow.getString(2).toString())
                .isEqualTo(expectRecord.getRow().getString(2).toString());
        if (partition != null) {
            // for partitioned tables c3 must equal the partition name
            assertThat(actualRow.getString(2).toString()).isEqualTo(partition);
        }
        // check system columns: __bucket, __offset, __timestamp
        assertThat(actualRow.getInt(3)).isEqualTo(expectBucket);
        assertThat(actualRow.getLong(4)).isEqualTo(expectRecord.logOffset());
        assertThat(actualRow.getTimestamp(5, 6).getMillisecond())
                .isEqualTo(expectRecord.timestamp());
    }
    // no extra rows beyond the expected ones
    assertThat(actualRecords.hasNext()).isFalse();
    actualRecords.close();
}
/**
 * Generates {@code numRecords} append-only rows (c1, c2, c3) for the given bucket.
 * Column c3 carries the partition name for partitioned tables, otherwise a
 * bucket-derived marker. For an append-only log table the written records are
 * exactly the expected records, so both tuple slots hold the same list.
 */
private Tuple2<List<LogRecord>, List<LogRecord>> genLogTableRecords(
        @Nullable String partition, int bucket, int numRecords) {
    List<LogRecord> records = new ArrayList<>(numRecords);
    for (int i = 0; i < numRecords; i++) {
        GenericRow row = new GenericRow(3); // c1, c2, c3(partition)
        row.setField(0, i);
        row.setField(1, BinaryString.fromString("bucket" + bucket + "_" + i));
        String c3 = partition != null ? partition : "bucket" + bucket;
        row.setField(2, BinaryString.fromString(c3));
        records.add(
                new GenericRecord(i, System.currentTimeMillis(), ChangeType.APPEND_ONLY, row));
    }
    return Tuple2.of(records, records);
}
/**
 * Generates {@code numRecords} append-only rows for a multi-partition-key table.
 * {@code partition} is a $-separated value list (e.g. "us-east$2024"); each value is
 * placed into its own partition column after the two business columns (c1, c2).
 */
private List<LogRecord> genLogTableRecordsForMultiPartition(
        String partition, int bucket, int numRecords) {
    String[] partitionValues = partition.split("\\$");
    List<LogRecord> records = new ArrayList<>(numRecords);
    for (int i = 0; i < numRecords; i++) {
        GenericRow row = new GenericRow(2 + partitionValues.length);
        row.setField(0, i);
        row.setField(
                1, BinaryString.fromString(partitionValues[0] + "_data_" + bucket + "_" + i));
        // partition columns start right after the business columns
        int column = 2;
        for (String value : partitionValues) {
            row.setField(column++, BinaryString.fromString(value));
        }
        records.add(
                new GenericRecord(i, System.currentTimeMillis(), ChangeType.APPEND_ONLY, row));
    }
    return records;
}
/**
 * Generates a changelog stream over four primary keys (10 records total, offsets 0-9)
 * together with the rows expected to survive after compaction:
 * key 0 ends with a DELETE (nothing expected), keys 1-3 each end with their last
 * INSERT/UPDATE_AFTER (three expected rows).
 *
 * @return Tuple2 of (written changelog records, expected surviving records)
 */
private Tuple2<List<LogRecord>, List<LogRecord>> genPrimaryKeyTableRecords(
        @Nullable String partition, int bucket) {
    int offset = -1;
    // gen +I, -U, +U, -D for key 0: fully deleted, contributes no expected row
    List<GenericRow> rows = genKvRow(partition, bucket, 0, 0, 4);
    List<LogRecord> writtenLogRecords =
            new ArrayList<>(
                    Arrays.asList(
                            toRecord(++offset, rows.get(0), INSERT),
                            toRecord(++offset, rows.get(1), UPDATE_BEFORE),
                            toRecord(++offset, rows.get(2), UPDATE_AFTER),
                            toRecord(++offset, rows.get(3), DELETE)));
    List<LogRecord> expectLogRecords = new ArrayList<>();
    // gen +I, -U, +U for key 1: last UPDATE_AFTER survives
    rows = genKvRow(partition, bucket, 1, 4, 7);
    writtenLogRecords.addAll(
            Arrays.asList(
                    toRecord(++offset, rows.get(0), INSERT),
                    toRecord(++offset, rows.get(1), UPDATE_BEFORE),
                    toRecord(++offset, rows.get(2), UPDATE_AFTER)));
    expectLogRecords.add(writtenLogRecords.get(writtenLogRecords.size() - 1));
    // gen +I, +U for key 2: last UPDATE_AFTER survives
    rows = genKvRow(partition, bucket, 2, 7, 9);
    writtenLogRecords.addAll(
            Arrays.asList(
                    toRecord(++offset, rows.get(0), INSERT),
                    toRecord(++offset, rows.get(1), UPDATE_AFTER)));
    expectLogRecords.add(writtenLogRecords.get(writtenLogRecords.size() - 1));
    // gen +I for key 3: the INSERT survives
    rows = genKvRow(partition, bucket, 3, 9, 10);
    writtenLogRecords.add(toRecord(++offset, rows.get(0), INSERT));
    expectLogRecords.add(writtenLogRecords.get(writtenLogRecords.size() - 1));
    return Tuple2.of(writtenLogRecords, expectLogRecords);
}
/**
 * Builds one row per value in {@code [from, to)} for the given primary key. All rows
 * share the key (c1) while c2 varies with the loop index; c3 is the partition name for
 * partitioned tables, otherwise a bucket-derived marker.
 */
private List<GenericRow> genKvRow(
        @Nullable String partition, int bucket, int key, int from, int to) {
    List<GenericRow> rows = new ArrayList<>(to - from);
    for (int i = from; i < to; i++) {
        GenericRow row = new GenericRow(3); // c1, c2, c3(partition)
        row.setField(0, key);
        row.setField(1, BinaryString.fromString("bucket" + bucket + "_" + i));
        String c3 = partition != null ? partition : "bucket" + bucket;
        row.setField(2, BinaryString.fromString(c3));
        rows.add(row);
    }
    return rows;
}
// Wraps a row into a GenericRecord at the given log offset with the current wall-clock timestamp.
private GenericRecord toRecord(long offset, GenericRow row, ChangeType changeType) {
    return new GenericRecord(offset, System.currentTimeMillis(), changeType, row);
}
/**
 * Reads back the rows of one Fluss bucket from the tiered Paimon table.
 *
 * <p>For primary-key tables the Fluss bucket maps to a Paimon bucket, so a bucket filter
 * is enough. For log tables everything lands in Paimon bucket 0, so splits are filtered
 * by the min/max stats of the __bucket system column (index 3) instead.
 */
private CloseableIterator<InternalRow> getPaimonRows(
        TablePath tablePath, @Nullable String partition, boolean isPrimaryKeyTable, int bucket)
        throws Exception {
    Identifier identifier = toPaimon(tablePath);
    FileStoreTable fileStoreTable = (FileStoreTable) paimonCatalog.getTable(identifier);
    ReadBuilder readBuilder = fileStoreTable.newReadBuilder();
    if (partition != null) {
        // single-partition-key tables use column c3 as the partition column
        readBuilder =
                readBuilder.withPartitionFilter(Collections.singletonMap("c3", partition));
    }
    List<Split> splits = new ArrayList<>();
    if (isPrimaryKeyTable) {
        splits = readBuilder.withBucketFilter(b -> b == bucket).newScan().plan().splits();
    } else {
        // for log table, we can't filter by bucket directly, filter file by __bucket column
        for (Split split : readBuilder.newScan().plan().splits()) {
            DataSplit dataSplit = (DataSplit) split;
            // bucket is always 0
            assertThat(dataSplit.bucket()).isEqualTo(0);
            // filter by __bucket column, remove any data file that doesn't belong to this
            // bucket: a file belongs only if min == max == bucket for column index 3
            // NOTE(review): this relies on DataSplit#dataFiles() returning a mutable,
            // live list -- confirm against the Paimon version in use
            dataSplit
                    .dataFiles()
                    .removeIf(
                            dataFileMeta ->
                                    !(dataFileMeta.valueStats().maxValues().getInt(3) == bucket
                                            && dataFileMeta.valueStats().minValues().getInt(3)
                                                    == bucket));
            if (!dataSplit.dataFiles().isEmpty()) {
                splits.add(split);
            }
        }
    }
    return readBuilder.newRead().createReader(splits).toCloseableIterator();
}
/**
 * Reads all rows of one (region, year) partition back from the tiered Paimon table.
 * {@code partition} is the $-separated form, e.g. "us-east$2024".
 */
private CloseableIterator<InternalRow> getPaimonRowsMultiPartition(
        TablePath tablePath, String partition) throws Exception {
    FileStoreTable fileStoreTable =
            (FileStoreTable) paimonCatalog.getTable(toPaimon(tablePath));
    // "us-east$2024" -> {region=us-east, year=2024}
    String[] values = partition.split("\\$");
    Map<String, String> partitionFilter = new HashMap<>();
    partitionFilter.put("region", values[0]);
    partitionFilter.put("year", values[1]);
    ReadBuilder readBuilder =
            fileStoreTable.newReadBuilder().withPartitionFilter(partitionFilter);
    List<Split> splits = readBuilder.newScan().plan().splits();
    return readBuilder.newRead().createReader(splits).toCloseableIterator();
}
/**
 * Reads all rows of one (region, year, month) partition back from the tiered Paimon table.
 * {@code partition} is the $-separated form, e.g. "us-east$2024$01".
 */
private CloseableIterator<InternalRow> getPaimonRowsThreePartition(
        TablePath tablePath, String partition) throws Exception {
    FileStoreTable fileStoreTable =
            (FileStoreTable) paimonCatalog.getTable(toPaimon(tablePath));
    // "us-east$2024$01" -> {region=us-east, year=2024, month=01}
    String[] values = partition.split("\\$");
    Map<String, String> partitionFilter = new HashMap<>();
    String[] partitionColumns = {"region", "year", "month"};
    for (int i = 0; i < partitionColumns.length; i++) {
        partitionFilter.put(partitionColumns[i], values[i]);
    }
    ReadBuilder readBuilder =
            fileStoreTable.newReadBuilder().withPartitionFilter(partitionFilter);
    List<Split> splits = readBuilder.newScan().plan().splits();
    return readBuilder.newRead().createReader(splits).toCloseableIterator();
}
/**
 * Creates a lake writer for one bucket via the tiering factory, supplying a minimal
 * {@link WriterInitContext} built from the given table path, bucket, partition, and info.
 *
 * @param partition   partition name, or null for a non-partitioned table
 * @param partitionId Fluss partition id matching {@code partition}, or null
 */
private LakeWriter<PaimonWriteResult> createLakeWriter(
        TablePath tablePath,
        int bucket,
        @Nullable String partition,
        @Nullable Long partitionId,
        TableInfo tableInfo)
        throws IOException {
    return paimonLakeTieringFactory.createLakeWriter(
            new WriterInitContext() {
                @Override
                public TablePath tablePath() {
                    return tablePath;
                }
                @Override
                public TableBucket tableBucket() {
                    // don't care about tableId & partitionId
                    return new TableBucket(0, partitionId, bucket);
                }
                @Nullable
                @Override
                public String partition() {
                    return partition;
                }
                @Override
                public TableInfo tableInfo() {
                    return tableInfo;
                }
            });
}
// Creates a lake committer for the given table via the tiering factory; the lambda is the
// CommitterInitContext supplying only the table path.
private LakeCommitter<PaimonWriteResult, PaimonCommittable> createLakeCommitter(
        TablePath tablePath) throws IOException {
    return paimonLakeTieringFactory.createLakeCommitter(() -> tablePath);
}
/**
 * Creates the Paimon-side table (c1 INT, c2 STRING, c3 STRING) used by the tiering test.
 *
 * @param isPrimaryTable whether to declare a primary key (c1, plus c3 when partitioned,
 *                       since partition columns must be part of the key)
 * @param isPartitioned  whether c3 is a partition column
 * @param numBuckets     fixed Paimon bucket count, or null for the default bucketing
 */
private void createTable(
        TablePath tablePath,
        boolean isPrimaryTable,
        boolean isPartitioned,
        @Nullable Integer numBuckets)
        throws Exception {
    Schema.Builder builder =
            Schema.newBuilder()
                    .column("c1", DataTypes.INT())
                    .column("c2", DataTypes.STRING())
                    .column("c3", DataTypes.STRING());
    if (isPartitioned) {
        builder.partitionKeys("c3");
    }
    if (isPrimaryTable) {
        if (isPartitioned) {
            builder.primaryKey("c1", "c3");
        } else {
            builder.primaryKey("c1");
        }
        // primary-key tables produce their own changelog from the input stream
        builder.option(
                CoreOptions.CHANGELOG_PRODUCER.key(),
                CoreOptions.ChangelogProducer.INPUT.toString());
    }
    if (numBuckets != null) {
        builder.option(CoreOptions.BUCKET.key(), String.valueOf(numBuckets));
    }
    doCreatePaimonTable(tablePath, builder);
}
/** Creates a log table partitioned by two keys: region and year. */
private void createMultiPartitionTable(TablePath tablePath) throws Exception {
    Schema.Builder builder = Schema.newBuilder();
    builder.column("c1", DataTypes.INT());
    builder.column("c2", DataTypes.STRING());
    builder.column("region", DataTypes.STRING());
    builder.column("year", DataTypes.STRING());
    builder.partitionKeys("region", "year");
    doCreatePaimonTable(tablePath, builder);
}
/** Creates a log table partitioned by three keys: region, year, and month. */
private void createThreePartitionTable(TablePath tablePath) throws Exception {
    Schema.Builder builder = Schema.newBuilder();
    builder.column("c1", DataTypes.INT());
    builder.column("c2", DataTypes.STRING());
    builder.column("region", DataTypes.STRING());
    builder.column("year", DataTypes.STRING());
    builder.column("month", DataTypes.STRING());
    builder.partitionKeys("region", "year", "month");
    doCreatePaimonTable(tablePath, builder);
}
/**
 * Appends the three Fluss system columns (__bucket, __offset, __timestamp) to the schema,
 * registers the commit callback option, and creates database and table (both ignore-if-exists).
 */
private void doCreatePaimonTable(TablePath tablePath, Schema.Builder paimonSchemaBuilder)
        throws Exception {
    // system columns must come last, in this exact order
    paimonSchemaBuilder
            .column(BUCKET_COLUMN_NAME, DataTypes.INT())
            .column(OFFSET_COLUMN_NAME, DataTypes.BIGINT())
            .column(TIMESTAMP_COLUMN_NAME, DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE())
            .option(
                    CoreOptions.COMMIT_CALLBACKS.key(),
                    PaimonLakeCommitter.PaimonCommitCallback.class.getName());
    paimonCatalog.createDatabase(tablePath.getDatabaseName(), true);
    paimonCatalog.createTable(toPaimon(tablePath), paimonSchemaBuilder.build(), true);
}
/**
 * Returns the Fluss bucket-offset property recorded in the given Paimon snapshot,
 * or null if the snapshot carries no such property.
 */
private String getSnapshotLogOffsetProperty(TablePath tablePath, long snapshotId)
        throws Exception {
    FileStoreTable fileStoreTable =
            (FileStoreTable) paimonCatalog.getTable(toPaimon(tablePath));
    return fileStoreTable
            .snapshotManager()
            .snapshot(snapshotId)
            .properties()
            .get(FLUSS_LAKE_SNAP_BUCKET_OFFSET_PROPERTY);
}
/** Returns the partition key column names of the tiered Paimon table. */
private List<String> getPartitionKeys(TablePath tablePath) throws Exception {
    FileStoreTable fileStoreTable =
            (FileStoreTable) paimonCatalog.getTable(toPaimon(tablePath));
    return fileStoreTable.partitionKeys();
}
}
|
apache/pulsar | 38,177 | pulsar-broker-auth-oidc/src/test/java/org/apache/pulsar/broker/authentication/oidc/AuthenticationProviderOpenIDIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.authentication.oidc;
import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
import static com.github.tomakehurst.wiremock.client.WireMock.get;
import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
import static com.github.tomakehurst.wiremock.client.WireMock.urlMatching;
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import com.github.tomakehurst.wiremock.WireMockServer;
import com.github.tomakehurst.wiremock.stubbing.Scenario;
import com.google.common.io.Resources;
import io.jsonwebtoken.SignatureAlgorithm;
import io.jsonwebtoken.impl.DefaultJwtBuilder;
import io.jsonwebtoken.io.Decoders;
import io.jsonwebtoken.security.Keys;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.KeyPair;
import java.security.PrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.util.Base64;
import java.util.Date;
import java.util.HashMap;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import javax.naming.AuthenticationException;
import lombok.Cleanup;
import org.apache.pulsar.broker.ServiceConfiguration;
import org.apache.pulsar.broker.authentication.AuthenticationDataCommand;
import org.apache.pulsar.broker.authentication.AuthenticationProvider;
import org.apache.pulsar.broker.authentication.AuthenticationProviderToken;
import org.apache.pulsar.broker.authentication.AuthenticationService;
import org.apache.pulsar.broker.authentication.AuthenticationState;
import org.apache.pulsar.broker.authentication.utils.AuthTokenUtils;
import org.apache.pulsar.common.api.AuthData;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/**
 * An integration test relying on WireMock to simulate an OpenID Connect provider.
 *
 * <p>The mock Authorization Server hosts several issuers — a normal one, one with a trailing
 * slash, one whose JWKS initially lacks the token's kid, one that advertises a mismatched
 * issuer, and one used by the Kubernetes fallback discovery modes — so that each validation
 * path in {@code AuthenticationProviderOpenID} can be exercised.
 */
public class AuthenticationProviderOpenIDIntegrationTest {
    AuthenticationProviderOpenID provider;
    // Private half of the RSA key pair used to sign every token generated in this suite.
    PrivateKey privateKey;
    String caCert = Resources.getResource("certificate-authority/jks/broker.truststore.pem").getPath();
    // These are the kid values for JWKs in the /keys endpoint
    String validJwk = "valid";
    String invalidJwk = "invalid";
    String validJwkWithoutAlg = "valid_without_alg";
    // The valid issuer
    String issuer;
    String issuerWithTrailingSlash;
    String issuerWithMissingKid;
    // This issuer is configured to return an issuer in the openid-configuration
    // that does not match the issuer on the token
    String issuerThatFails;
    String issuerK8s;
    WireMockServer server;

    @BeforeClass
    void beforeClass() throws IOException {
        // Port matches the port supplied in the fakeKubeConfig.yaml resource, which makes the k8s integration
        // tests work correctly.
        server = new WireMockServer(wireMockConfig().dynamicHttpsPort()
                .keystorePath(Resources.getResource("certificate-authority/jks/broker.keystore.jks").getPath())
                .keystoreType("JKS")
                .keyManagerPassword("111111")
                .keystorePassword("111111"));
        server.start();
        issuer = server.baseUrl();
        issuerWithTrailingSlash = issuer + "/trailing-slash/";
        issuerWithMissingKid = issuer + "/missing-kid";
        issuerThatFails = issuer + "/fail";
        issuerK8s = issuer + "/k8s";

        // Set up a correct openid-configuration
        server.stubFor(
                get(urlEqualTo("/.well-known/openid-configuration"))
                        .willReturn(aResponse()
                                .withHeader("Content-Type", "application/json")
                                .withBody("""
                                        {
                                          "issuer": "%s",
                                          "jwks_uri": "%s/keys"
                                        }
                                        """.replace("%s", server.baseUrl()))));

        // Set up a correct openid-configuration that the k8s integration test can use
        // NOTE: integration tests revealed that the k8s client adds a trailing slash to the openid-configuration
        // endpoint.
        // NOTE: the jwks_uri is ignored, so we supply one that would fail here to ensure that we are not implicitly
        // relying on the jwks_uri.
        server.stubFor(
                get(urlEqualTo("/k8s/.well-known/openid-configuration/"))
                        .willReturn(aResponse()
                                .withHeader("Content-Type", "application/json")
                                .withBody("""
                                        {
                                          "issuer": "%s",
                                          "jwks_uri": "%s/no/keys/hosted/here"
                                        }
                                        """.formatted(issuer, issuer))));

        // Set up a correct openid-configuration that has a trailing slash in the issuers URL. This is a
        // behavior observed by Auth0. In this case, the token's iss claim also has a trailing slash.
        // The server should normalize the URL and call the Authorization Server without the double slash.
        // NOTE: the spec does not indicate that the jwks_uri must have the same prefix as the issuer, and that
        // is used here to simplify the testing.
        server.stubFor(
                get(urlEqualTo("/trailing-slash/.well-known/openid-configuration"))
                        .willReturn(aResponse()
                                .withHeader("Content-Type", "application/json")
                                .withBody("""
                                        {
                                          "issuer": "%s",
                                          "jwks_uri": "%s/keys"
                                        }
                                        """.formatted(issuerWithTrailingSlash, issuer))));

        // Set up an incorrect openid-configuration where issuer does not match
        server.stubFor(
                get(urlEqualTo("/fail/.well-known/openid-configuration"))
                        .willReturn(aResponse()
                                .withHeader("Content-Type", "application/json")
                                .withBody("""
                                        {
                                          "issuer": "https://wrong-issuer.com",
                                          "jwks_uri": "%s/keys"
                                        }
                                        """.formatted(server.baseUrl()))));

        // Create the token key pair
        KeyPair keyPair = Keys.keyPairFor(SignatureAlgorithm.RS256);
        privateKey = keyPair.getPrivate();
        RSAPublicKey rsaPublicKey = (RSAPublicKey) keyPair.getPublic();
        String n = Base64.getUrlEncoder().encodeToString(rsaPublicKey.getModulus().toByteArray());
        String e = Base64.getUrlEncoder().encodeToString(rsaPublicKey.getPublicExponent().toByteArray());

        // Set up JWKS endpoint with a valid and an invalid public key
        // The url matches are for both the normal and the k8s endpoints
        server.stubFor(
                get(urlMatching("/keys|/k8s/openid/v1/jwks/"))
                        .willReturn(aResponse()
                                .withHeader("Content-Type", "application/json")
                                .withBody(
                                        """
                                        {
                                            "keys" : [
                                                {
                                                "kid":"%s",
                                                "kty":"RSA",
                                                "alg":"RS256",
                                                "n":"%s",
                                                "e":"%s"
                                                },
                                                {
                                                "kid": "%s",
                                                "kty":"RSA",
                                                "n":"invalid-key",
                                                "e":"AQAB"
                                                },
                                                {
                                                "kid":"%s",
                                                "kty":"RSA",
                                                "n":"%s",
                                                "e":"%s"
                                                }
                                            ]
                                        }
                                        """.formatted(validJwk, n, e, invalidJwk, validJwkWithoutAlg, n, e))));

        server.stubFor(
                get(urlEqualTo("/missing-kid/.well-known/openid-configuration"))
                        .willReturn(aResponse()
                                .withHeader("Content-Type", "application/json")
                                .withBody("""
                                        {
                                          "issuer": "%s",
                                          "jwks_uri": "%s/keys"
                                        }
                                        """.formatted(issuerWithMissingKid, issuerWithMissingKid))));

        // Set up JWKS endpoint where it first responds without the KID, then with the KID. This is a stateful stub.
        // Note that the state machine is circular to make it easier to verify the two code paths that rely on
        // this logic.
        server.stubFor(
                get(urlMatching("/missing-kid/keys"))
                        .inScenario("Changing KIDs")
                        .whenScenarioStateIs(Scenario.STARTED)
                        .willSetStateTo("serve-kid")
                        .willReturn(aResponse()
                                .withHeader("Content-Type", "application/json")
                                .withBody("{\"keys\":[]}")));
        server.stubFor(
                get(urlMatching("/missing-kid/keys"))
                        .inScenario("Changing KIDs")
                        .whenScenarioStateIs("serve-kid")
                        .willSetStateTo(Scenario.STARTED)
                        .willReturn(aResponse()
                                .withHeader("Content-Type", "application/json")
                                .withBody(
                                        """
                                        {
                                            "keys" : [
                                                {
                                                "kid":"%s",
                                                "kty":"RSA",
                                                "alg":"RS256",
                                                "n":"%s",
                                                "e":"%s"
                                                }
                                            ]
                                        }
                                        """.formatted(validJwk, n, e))));

        ServiceConfiguration conf = new ServiceConfiguration();
        conf.setAuthenticationEnabled(true);
        conf.setAuthenticationProviders(Set.of(AuthenticationProviderOpenID.class.getName()));
        Properties props = conf.getProperties();
        props.setProperty(AuthenticationProviderOpenID.ISSUER_TRUST_CERTS_FILE_PATH, caCert);
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_AUDIENCES, "allowed-audience");
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_TOKEN_ISSUERS, issuer + "," + issuerWithTrailingSlash
                + "," + issuerThatFails);

        // Create the fake kube config file. This file is configured via the env vars and is written to the
        // target directory so maven clean will remove it.
        // Use an explicit charset so the test does not depend on the platform default encoding.
        byte[] template = Files.readAllBytes(Path.of(System.getenv("KUBECONFIG_TEMPLATE")));
        String kubeConfig = new String(template, StandardCharsets.UTF_8)
                .replace("${WIRE_MOCK_PORT}", String.valueOf(server.port()));
        Files.write(Path.of(System.getenv("KUBECONFIG")), kubeConfig.getBytes(StandardCharsets.UTF_8));

        provider = new AuthenticationProviderOpenID();
        provider.initialize(AuthenticationProvider.Context.builder().config(conf).build());
    }

    @AfterClass
    void afterClass() throws IOException {
        provider.close();
        server.stop();
    }

    @BeforeMethod
    public void beforeMethod() {
        // Scenarios are stateful. Start each test with the correct state.
        server.resetScenarios();
    }

    @Test
    public void testTokenWithValidJWK() throws Exception {
        String role = "superuser";
        String token = generateToken(validJwk, issuer, role, "allowed-audience", 0L, 0L, 10000L);
        // TestNG's assertEquals takes (actual, expected).
        assertEquals(provider.authenticateAsync(new AuthenticationDataCommand(token)).get(), role);
    }

    @Test
    public void testTokenWithValidJWKWithoutAlg() throws Exception {
        String role = "superuser";
        // test with a key in JWK that does not have an "alg" field. "alg" is optional in the JWK spec
        String token = generateToken(validJwkWithoutAlg, issuer, role, "allowed-audience", 0L, 0L, 10000L);
        assertEquals(provider.authenticateAsync(new AuthenticationDataCommand(token)).get(), role);
    }

    @Test
    public void testTokenWithTrailingSlashAndValidJWK() throws Exception {
        String role = "superuser";
        String token = generateToken(validJwk, issuer + "/trailing-slash/", role, "allowed-audience", 0L, 0L, 10000L);
        assertEquals(provider.authenticateAsync(new AuthenticationDataCommand(token)).get(), role);
    }

    @Test
    public void testTokenWithInvalidJWK() throws Exception {
        String role = "superuser";
        String token = generateToken(invalidJwk, issuer, role, "allowed-audience", 0L, 0L, 10000L);
        try {
            provider.authenticateAsync(new AuthenticationDataCommand(token)).get();
            fail("Expected exception");
        } catch (ExecutionException e) {
            assertTrue(e.getCause() instanceof AuthenticationException, "Found exception: " + e.getCause());
        }
    }

    @Test
    public void testAuthorizationServerReturnsIncorrectIssuerInOpenidConnectConfiguration() throws Exception {
        String role = "superuser";
        String token = generateToken(validJwk, issuerThatFails, role, "allowed-audience", 0L, 0L, 10000L);
        try {
            provider.authenticateAsync(new AuthenticationDataCommand(token)).get();
            fail("Expected exception");
        } catch (ExecutionException e) {
            assertTrue(e.getCause() instanceof AuthenticationException, "Found exception: " + e.getCause());
        }
    }

    @Test
    public void testTokenWithInvalidAudience() throws Exception {
        String role = "superuser";
        String token = generateToken(validJwk, issuer, role, "invalid-audience", 0L, 0L, 10000L);
        try {
            provider.authenticateAsync(new AuthenticationDataCommand(token)).get();
            fail("Expected exception");
        } catch (ExecutionException e) {
            assertTrue(e.getCause() instanceof AuthenticationException, "Found exception: " + e.getCause());
        }
    }

    @Test
    public void testTokenWithInvalidIssuer() throws Exception {
        String role = "superuser";
        String token = generateToken(validJwk, "https://not-an-allowed-issuer.com", role,
                "allowed-audience", 0L, 0L, 10000L);
        try {
            provider.authenticateAsync(new AuthenticationDataCommand(token)).get();
            fail("Expected exception");
        } catch (ExecutionException e) {
            assertTrue(e.getCause() instanceof AuthenticationException, "Found exception: " + e.getCause());
        }
    }

    @Test
    public void testKidCacheMissWhenRefreshConfigZero() throws Exception {
        ServiceConfiguration conf = new ServiceConfiguration();
        conf.setAuthenticationEnabled(true);
        conf.setAuthenticationProviders(Set.of(AuthenticationProviderOpenID.class.getName()));
        Properties props = conf.getProperties();
        props.setProperty(AuthenticationProviderOpenID.ISSUER_TRUST_CERTS_FILE_PATH, caCert);
        // Allows us to retrieve the JWK immediately after the cache miss of the KID
        props.setProperty(AuthenticationProviderOpenID.KEY_ID_CACHE_MISS_REFRESH_SECONDS, "0");
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_AUDIENCES, "allowed-audience");
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_TOKEN_ISSUERS, issuerWithMissingKid);
        @Cleanup
        AuthenticationProviderOpenID provider = new AuthenticationProviderOpenID();
        provider.initialize(AuthenticationProvider.Context.builder().config(conf).build());

        String role = "superuser";
        String token = generateToken(validJwk, issuerWithMissingKid, role, "allowed-audience", 0L, 0L, 10000L);
        assertEquals(provider.authenticateAsync(new AuthenticationDataCommand(token)).get(), role);
    }

    @Test
    public void testKidCacheMissWhenRefreshConfigLongerThanDelta() throws Exception {
        ServiceConfiguration conf = new ServiceConfiguration();
        conf.setAuthenticationEnabled(true);
        conf.setAuthenticationProviders(Set.of(AuthenticationProviderOpenID.class.getName()));
        Properties props = conf.getProperties();
        props.setProperty(AuthenticationProviderOpenID.ISSUER_TRUST_CERTS_FILE_PATH, caCert);
        // This value is high enough that the provider will not refresh the JWK
        props.setProperty(AuthenticationProviderOpenID.KEY_ID_CACHE_MISS_REFRESH_SECONDS, "100");
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_AUDIENCES, "allowed-audience");
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_TOKEN_ISSUERS, issuerWithMissingKid);
        @Cleanup
        AuthenticationProviderOpenID provider = new AuthenticationProviderOpenID();
        provider.initialize(AuthenticationProvider.Context.builder().config(conf).build());

        String role = "superuser";
        String token = generateToken(validJwk, issuerWithMissingKid, role, "allowed-audience", 0L, 0L, 10000L);
        try {
            provider.authenticateAsync(new AuthenticationDataCommand(token)).get();
            fail("Expected exception");
        } catch (ExecutionException e) {
            assertTrue(e.getCause() instanceof IllegalArgumentException, "Found exception: " + e.getCause());
            assertTrue(e.getCause().getMessage().contains("No JWK found for Key ID valid"),
                    "Found exception: " + e.getCause());
        }
    }

    @Test
    public void testKubernetesApiServerAsDiscoverTrustedIssuerSuccess() throws Exception {
        ServiceConfiguration conf = new ServiceConfiguration();
        conf.setAuthenticationEnabled(true);
        conf.setAuthenticationProviders(Set.of(AuthenticationProviderOpenID.class.getName()));
        Properties props = conf.getProperties();
        props.setProperty(AuthenticationProviderOpenID.ISSUER_TRUST_CERTS_FILE_PATH, caCert);
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_AUDIENCES, "allowed-audience");
        props.setProperty(AuthenticationProviderOpenID.FALLBACK_DISCOVERY_MODE, "KUBERNETES_DISCOVER_TRUSTED_ISSUER");
        // Test requires that k8sIssuer is not in the allowed token issuers
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_TOKEN_ISSUERS, "");
        @Cleanup
        AuthenticationProviderOpenID provider = new AuthenticationProviderOpenID();
        provider.initialize(AuthenticationProvider.Context.builder().config(conf).build());

        String role = "superuser";
        // We use the normal issuer on the token because the /k8s endpoint is configured via the kube config file
        // made as part of the test setup. The kube client then gets the issuer from the /k8s endpoint and discovers
        // this issuer.
        String token = generateToken(validJwk, issuer, role, "allowed-audience", 0L, 0L, 10000L);
        assertEquals(provider.authenticateAsync(new AuthenticationDataCommand(token)).get(), role);

        // Ensure that a subsequent token with a different issuer still fails due to invalid issuer exception
        String token2 = generateToken(validJwk, "http://not-the-k8s-issuer", role, "allowed-audience", 0L, 0L, 10000L);
        try {
            provider.authenticateAsync(new AuthenticationDataCommand(token2)).get();
            fail("Expected exception");
        } catch (ExecutionException e) {
            assertTrue(e.getCause() instanceof AuthenticationException, "Found exception: " + e.getCause());
            assertTrue(e.getCause().getMessage().contains("Issuer not allowed"),
                    "Unexpected error message: " + e.getCause().getMessage());
        }
    }

    @Test
    public void testKubernetesApiServerAsDiscoverTrustedIssuerFailsDueToMismatchedIssuerClaim() throws Exception {
        ServiceConfiguration conf = new ServiceConfiguration();
        conf.setAuthenticationEnabled(true);
        conf.setAuthenticationProviders(Set.of(AuthenticationProviderOpenID.class.getName()));
        Properties props = conf.getProperties();
        props.setProperty(AuthenticationProviderOpenID.ISSUER_TRUST_CERTS_FILE_PATH, caCert);
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_AUDIENCES, "allowed-audience");
        props.setProperty(AuthenticationProviderOpenID.FALLBACK_DISCOVERY_MODE, "KUBERNETES_DISCOVER_TRUSTED_ISSUER");
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_TOKEN_ISSUERS, "");
        @Cleanup
        AuthenticationProviderOpenID provider = new AuthenticationProviderOpenID();
        provider.initialize(AuthenticationProvider.Context.builder().config(conf).build());

        String role = "superuser";
        String token = generateToken(validJwk, "http://not-the-k8s-issuer", role, "allowed-audience", 0L, 0L, 10000L);
        try {
            provider.authenticateAsync(new AuthenticationDataCommand(token)).get();
            fail("Expected exception");
        } catch (ExecutionException e) {
            assertTrue(e.getCause() instanceof AuthenticationException, "Found exception: " + e.getCause());
        }
    }

    @Test
    public void testKubernetesApiServerAsDiscoverPublicKeySuccess() throws Exception {
        ServiceConfiguration conf = new ServiceConfiguration();
        conf.setAuthenticationEnabled(true);
        conf.setAuthenticationProviders(Set.of(AuthenticationProviderOpenID.class.getName()));
        Properties props = conf.getProperties();
        props.setProperty(AuthenticationProviderOpenID.ISSUER_TRUST_CERTS_FILE_PATH, caCert);
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_AUDIENCES, "allowed-audience");
        props.setProperty(AuthenticationProviderOpenID.FALLBACK_DISCOVERY_MODE, "KUBERNETES_DISCOVER_PUBLIC_KEYS");
        // Test requires that k8sIssuer is not in the allowed token issuers
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_TOKEN_ISSUERS, "");
        @Cleanup
        AuthenticationProviderOpenID provider = new AuthenticationProviderOpenID();
        provider.initialize(AuthenticationProvider.Context.builder().config(conf).build());

        String role = "superuser";
        String token = generateToken(validJwk, issuer, role, "allowed-audience", 0L, 0L, 10000L);
        assertEquals(provider.authenticateAsync(new AuthenticationDataCommand(token)).get(), role);

        // Ensure that a subsequent token with a different issuer still fails due to invalid issuer exception
        String token2 = generateToken(validJwk, "http://not-the-k8s-issuer", role, "allowed-audience", 0L, 0L, 10000L);
        try {
            provider.authenticateAsync(new AuthenticationDataCommand(token2)).get();
            fail("Expected exception");
        } catch (ExecutionException e) {
            assertTrue(e.getCause() instanceof AuthenticationException, "Found exception: " + e.getCause());
            assertTrue(e.getCause().getMessage().contains("Issuer not allowed"),
                    "Unexpected error message: " + e.getCause().getMessage());
        }
    }

    @Test
    public void testKubernetesApiServerAsDiscoverPublicKeyFailsDueToMismatchedIssuerClaim() throws Exception {
        ServiceConfiguration conf = new ServiceConfiguration();
        conf.setAuthenticationEnabled(true);
        conf.setAuthenticationProviders(Set.of(AuthenticationProviderOpenID.class.getName()));
        Properties props = conf.getProperties();
        props.setProperty(AuthenticationProviderOpenID.ISSUER_TRUST_CERTS_FILE_PATH, caCert);
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_AUDIENCES, "allowed-audience");
        props.setProperty(AuthenticationProviderOpenID.FALLBACK_DISCOVERY_MODE, "KUBERNETES_DISCOVER_PUBLIC_KEYS");
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_TOKEN_ISSUERS, "");
        @Cleanup
        AuthenticationProviderOpenID provider = new AuthenticationProviderOpenID();
        provider.initialize(AuthenticationProvider.Context.builder().config(conf).build());

        String role = "superuser";
        String token = generateToken(validJwk, "http://not-the-k8s-issuer", role, "allowed-audience", 0L, 0L, 10000L);
        try {
            provider.authenticateAsync(new AuthenticationDataCommand(token)).get();
            fail("Expected exception");
        } catch (ExecutionException e) {
            assertTrue(e.getCause() instanceof AuthenticationException, "Found exception: " + e.getCause());
        }
    }

    @Test
    public void testAuthenticationStateOpenIDForValidToken() throws Exception {
        String role = "superuser";
        String token = generateToken(validJwk, issuer, role, "allowed-audience", 0L, 0L, 10000L);
        AuthenticationState state = provider.newAuthState(null, null, null);
        AuthData result = state.authenticateAsync(AuthData.of(token.getBytes())).get();
        assertNull(result);
        assertEquals(state.getAuthRole(), role);
        assertEquals(state.getAuthDataSource().getCommandData(), token);
        assertFalse(state.isExpired());
    }

    @Test
    public void testAuthenticationStateOpenIDForExpiredToken() throws Exception {
        String role = "superuser";
        // Negative exp offset makes the token already expired.
        String token = generateToken(validJwk, issuer, role, "allowed-audience", 0L, 0L, -10000L);
        AuthenticationState state = provider.newAuthState(null, null, null);
        try {
            state.authenticateAsync(AuthData.of(token.getBytes())).get();
            fail("Expected exception");
        } catch (ExecutionException e) {
            assertTrue(e.getCause() instanceof AuthenticationException, "Found exception: " + e.getCause());
        }
    }

    @Test
    public void testAuthenticationStateOpenIDForValidTokenWithNoExp() throws Exception {
        String role = "superuser";
        // A null exp offset omits the exp claim entirely, which the provider must reject.
        String token = generateToken(validJwk, issuer, role, "allowed-audience", 0L, 0L, null);
        AuthenticationState state = provider.newAuthState(null, null, null);
        try {
            state.authenticateAsync(AuthData.of(token.getBytes())).get();
            fail("Expected exception");
        } catch (ExecutionException e) {
            assertTrue(e.getCause() instanceof AuthenticationException, "Found exception: " + e.getCause());
        }
    }

    @Test
    public void testAuthenticationStateOpenIDForTokenExpiration() throws Exception {
        ServiceConfiguration conf = new ServiceConfiguration();
        conf.setAuthenticationEnabled(true);
        conf.setAuthenticationProviders(Set.of(AuthenticationProviderOpenID.class.getName()));
        Properties props = conf.getProperties();
        props.setProperty(AuthenticationProviderOpenID.ISSUER_TRUST_CERTS_FILE_PATH, caCert);
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_AUDIENCES, "allowed-audience");
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_TOKEN_ISSUERS, issuer);
        // Use the leeway to allow the token to pass validation and then fail expiration
        props.setProperty(AuthenticationProviderOpenID.ACCEPTED_TIME_LEEWAY_SECONDS, "10");
        @Cleanup
        AuthenticationProviderOpenID provider = new AuthenticationProviderOpenID();
        provider.initialize(AuthenticationProvider.Context.builder().config(conf).build());

        String role = "superuser";
        String token = generateToken(validJwk, issuer, role, "allowed-audience", 0L, 0L, 0L);
        AuthenticationState state = provider.newAuthState(null, null, null);
        AuthData result = state.authenticateAsync(AuthData.of(token.getBytes())).get();
        assertNull(result);
        assertEquals(state.getAuthRole(), role);
        assertEquals(state.getAuthDataSource().getCommandData(), token);
        assertTrue(state.isExpired());
    }

    /**
     * This test covers the migration scenario where you have both the Token and OpenID providers. It ensures
     * both kinds of authentication work.
     * @throws Exception
     */
    @Test
    public void testAuthenticationProviderListStateSuccess() throws Exception {
        ServiceConfiguration conf = new ServiceConfiguration();
        conf.setAuthenticationEnabled(true);
        conf.setAuthenticationProviders(Set.of(AuthenticationProviderOpenID.class.getName(),
                AuthenticationProviderToken.class.getName()));
        Properties props = conf.getProperties();
        props.setProperty(AuthenticationProviderOpenID.ISSUER_TRUST_CERTS_FILE_PATH, caCert);
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_AUDIENCES, "allowed-audience");
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_TOKEN_ISSUERS, issuer);

        // Set up static token
        KeyPair keyPair = Keys.keyPairFor(SignatureAlgorithm.RS256);
        // Use public key for validation
        String publicKeyStr = AuthTokenUtils.encodeKeyBase64(keyPair.getPublic());
        props.setProperty("tokenPublicKey", publicKeyStr);
        // Use private key to generate token
        String privateKeyStr = AuthTokenUtils.encodeKeyBase64(keyPair.getPrivate());
        PrivateKey privateKey = AuthTokenUtils.decodePrivateKey(Decoders.BASE64.decode(privateKeyStr),
                SignatureAlgorithm.RS256);
        String staticToken = AuthTokenUtils.createToken(privateKey, "superuser", Optional.empty());

        @Cleanup
        AuthenticationService service = new AuthenticationService(conf);
        AuthenticationProvider provider = service.getAuthenticationProvider("token");

        // First, authenticate using OIDC
        String role = "superuser";
        String oidcToken = generateToken(validJwk, issuer, role, "allowed-audience", 0L, 0L, 10000L);
        assertEquals(provider.authenticateAsync(new AuthenticationDataCommand(oidcToken)).get(), role);

        // Authenticate using the static token
        assertEquals(provider.authenticateAsync(new AuthenticationDataCommand(staticToken)).get(), "superuser");

        // Use authenticationState to authenticate using OIDC
        AuthenticationState state1 = service.getAuthenticationProvider("token").newAuthState(null, null, null);
        assertNull(state1.authenticateAsync(AuthData.of(oidcToken.getBytes())).get());
        assertEquals(state1.getAuthRole(), role);

        // Use authenticationState to authenticate using the static token
        AuthenticationState state2 = service.getAuthenticationProvider("token").newAuthState(null, null, null);
        assertNull(state2.authenticateAsync(AuthData.of(staticToken.getBytes())).get());
        // Fixed copy-paste bug: this previously re-asserted state1, leaving state2's role unverified.
        assertEquals(state2.getAuthRole(), role);
    }

    @Test
    void ensureRoleClaimForNonSubClaimReturnsRole() throws Exception {
        @Cleanup
        AuthenticationProviderOpenID provider = new AuthenticationProviderOpenID();
        Properties props = new Properties();
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_TOKEN_ISSUERS, issuer);
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_AUDIENCES, "allowed-audience");
        props.setProperty(AuthenticationProviderOpenID.ROLE_CLAIM, "test");
        props.setProperty(AuthenticationProviderOpenID.ISSUER_TRUST_CERTS_FILE_PATH, caCert);
        ServiceConfiguration config = new ServiceConfiguration();
        config.setProperties(props);
        provider.initialize(AuthenticationProvider.Context.builder().config(config).build());

        // Build a JWT with a custom claim
        HashMap<String, Object> claims = new HashMap<>();
        claims.put("test", "my-role");
        String token = generateToken(validJwk, issuer, "not-my-role", "allowed-audience", 0L,
                0L, 10000L, claims);
        assertEquals(provider.authenticateAsync(new AuthenticationDataCommand(token)).get(), "my-role");
    }

    @Test
    void ensureRoleClaimForNonSubClaimFailsWhenClaimIsMissing() throws Exception {
        @Cleanup
        AuthenticationProviderOpenID provider = new AuthenticationProviderOpenID();
        Properties props = new Properties();
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_TOKEN_ISSUERS, issuer);
        props.setProperty(AuthenticationProviderOpenID.ALLOWED_AUDIENCES, "allowed-audience");
        props.setProperty(AuthenticationProviderOpenID.ROLE_CLAIM, "test");
        props.setProperty(AuthenticationProviderOpenID.ISSUER_TRUST_CERTS_FILE_PATH, caCert);
        ServiceConfiguration config = new ServiceConfiguration();
        config.setProperties(props);
        provider.initialize(AuthenticationProvider.Context.builder().config(config).build());

        // Build a JWT without the "test" claim, which should cause the authentication to fail
        String token = generateToken(validJwk, issuer, "not-my-role", "allowed-audience", 0L,
                0L, 10000L);
        try {
            provider.authenticateAsync(new AuthenticationDataCommand(token)).get();
            fail("Expected exception");
        } catch (ExecutionException e) {
            assertTrue(e.getCause() instanceof AuthenticationException, "Found exception: " + e.getCause());
        }
    }

    // This test is somewhat counterintuitive. We allow the state object to change roles, but then we fail it
    // in the ServerCnx handling of the state object. As such, it is essential that the state object allow
    // the role to change.
    @Test
    public void testAuthenticationStateOpenIDAllowsRoleChange() throws Exception {
        String role1 = "superuser";
        String token1 = generateToken(validJwk, issuer, role1, "allowed-audience", 0L, 0L, 10000L);
        String role2 = "otheruser";
        String token2 = generateToken(validJwk, issuer, role2, "allowed-audience", 0L, 0L, 10000L);
        AuthenticationState state = provider.newAuthState(null, null, null);

        AuthData result1 = state.authenticateAsync(AuthData.of(token1.getBytes())).get();
        assertNull(result1);
        assertEquals(state.getAuthRole(), role1);
        assertEquals(state.getAuthDataSource().getCommandData(), token1);
        assertFalse(state.isExpired());

        AuthData result2 = state.authenticateAsync(AuthData.of(token2.getBytes())).get();
        assertNull(result2);
        assertEquals(state.getAuthRole(), role2);
        assertEquals(state.getAuthDataSource().getCommandData(), token2);
        assertFalse(state.isExpired());
    }

    /**
     * Generates a signed JWT with no extra claims.
     * @see #generateToken(String, String, String, String, Long, Long, Long, HashMap)
     */
    private String generateToken(String kid, String issuer, String subject, String audience,
                                 Long iatOffset, Long nbfOffset, Long expOffset) {
        return generateToken(kid, issuer, subject, audience, iatOffset, nbfOffset, expOffset, new HashMap<>());
    }

    /**
     * Generates a JWT signed with this class's {@link #privateKey}.
     *
     * @param kid the key id to place in the JWT header; must match a kid served by the JWKS stub
     *            for validation to succeed
     * @param issuer the iss claim
     * @param subject the sub claim
     * @param audience the aud claim
     * @param iatOffset millisecond offset from now for the iat claim; null omits the claim
     * @param nbfOffset millisecond offset from now for the nbf claim; null omits the claim
     * @param expOffset millisecond offset from now for the exp claim; null omits the claim
     * @param extraClaims additional claims to add to the token body
     * @return the compact serialized JWT
     */
    private String generateToken(String kid, String issuer, String subject, String audience,
                                 Long iatOffset, Long nbfOffset, Long expOffset, HashMap<String, Object> extraClaims) {
        long now = System.currentTimeMillis();
        DefaultJwtBuilder defaultJwtBuilder = new DefaultJwtBuilder();
        defaultJwtBuilder.setHeaderParam("kid", kid);
        defaultJwtBuilder.setHeaderParam("typ", "JWT");
        defaultJwtBuilder.setHeaderParam("alg", "RS256");
        defaultJwtBuilder.setIssuer(issuer);
        defaultJwtBuilder.setSubject(subject);
        defaultJwtBuilder.setAudience(audience);
        defaultJwtBuilder.setIssuedAt(iatOffset != null ? new Date(now + iatOffset) : null);
        defaultJwtBuilder.setNotBefore(nbfOffset != null ? new Date(now + nbfOffset) : null);
        defaultJwtBuilder.setExpiration(expOffset != null ? new Date(now + expOffset) : null);
        defaultJwtBuilder.addClaims(extraClaims);
        defaultJwtBuilder.signWith(privateKey);
        return defaultJwtBuilder.compact();
    }
}
|
googleapis/google-cloud-java | 38,022 | java-video-live-stream/proto-google-cloud-live-stream-v1/src/main/java/com/google/cloud/video/livestream/v1/ListClipsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/video/livestream/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.video.livestream.v1;
/**
*
*
* <pre>
* Request message for "LivestreamService.ListClips".
* </pre>
*
* Protobuf type {@code google.cloud.video.livestream.v1.ListClipsRequest}
*/
public final class ListClipsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.video.livestream.v1.ListClipsRequest)
ListClipsRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Use ListClipsRequest.newBuilder() to construct.
private ListClipsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default constructor: initializes every string field to the proto3 default (empty string).
private ListClipsRequest() {
  parent_ = "";
  pageToken_ = "";
  filter_ = "";
  orderBy_ = "";
}
// Creates an empty instance; invoked by the protobuf runtime (the unused parameter exists only
// to distinguish this overload — see GeneratedMessageV3.UnusedPrivateParameter).
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListClipsRequest();
}
/** Returns the protobuf descriptor for the {@code ListClipsRequest} message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.video.livestream.v1.ServiceProto
      .internal_static_google_cloud_video_livestream_v1_ListClipsRequest_descriptor;
}
// Supplies the reflective field accessor table linking this generated class (and its Builder)
// to the descriptor above.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.video.livestream.v1.ServiceProto
      .internal_static_google_cloud_video_livestream_v1_ListClipsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.video.livestream.v1.ListClipsRequest.class,
          com.google.cloud.video.livestream.v1.ListClipsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;

// Holds either a String or a ByteString; decoded lazily on first String access.
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";

/**
 *
 *
 * <pre>
 * Required. Parent value for ListClipsRequest
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so later calls skip the UTF-8 decode.
    parent_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Required. Parent value for ListClipsRequest
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString so later calls skip the UTF-8 encode.
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORDER_BY_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
@java.lang.Override
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // -1 = not computed, 0 = not initialized, 1 = initialized. This message has no
  // required proto2 fields or sub-messages to validate, so it is always initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Proto3 semantics: fields at their default value ("" / 0) are skipped entirely
  // on the wire; unknown fields captured at parse time are re-emitted.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
    }
    getUnknownFields().writeTo(output);
  }

  // Size is memoized in memoizedSize (inherited field); -1 is the "not yet
  // computed" sentinel. Must mirror writeTo's skip-default logic exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Field-by-field equality, including unknown fields; falls back to super.equals
  // (reference equality) for non-ListClipsRequest arguments.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.video.livestream.v1.ListClipsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.video.livestream.v1.ListClipsRequest other =
        (com.google.cloud.video.livestream.v1.ListClipsRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getOrderBy().equals(other.getOrderBy())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Standard generated-protobuf hash mixing (19/37/53/29 multipliers), memoized
  // in the inherited memoizedHashCode field; consistent with equals above.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
    hash = (53 * hash) + getOrderBy().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse surface: every overload delegates to the singleton
  // PARSER (declared near the end of the class) or to GeneratedMessageV3 helpers
  // that wrap stream IOExceptions.
  public static com.google.cloud.video.livestream.v1.ListClipsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.video.livestream.v1.ListClipsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.video.livestream.v1.ListClipsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.video.livestream.v1.ListClipsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.video.livestream.v1.ListClipsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.video.livestream.v1.ListClipsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.video.livestream.v1.ListClipsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.video.livestream.v1.ListClipsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.video.livestream.v1.ListClipsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.video.livestream.v1.ListClipsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.video.livestream.v1.ListClipsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.video.livestream.v1.ListClipsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Builder factories. toBuilder() special-cases the default instance so a fresh
  // Builder is returned without a redundant mergeFrom of all-default fields.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.video.livestream.v1.ListClipsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for "LivestreamService.ListClips".
   * </pre>
   *
   * Protobuf type {@code google.cloud.video.livestream.v1.ListClipsRequest}
   */
  // NOTE(review): generated mutable companion of the immutable message.
  // bitField0_ tracks which fields have been explicitly set on the builder
  // (bit 0 = parent, 1 = pageSize, 2 = pageToken, 3 = filter, 4 = orderBy).
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.video.livestream.v1.ListClipsRequest)
      com.google.cloud.video.livestream.v1.ListClipsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.video.livestream.v1.ServiceProto
          .internal_static_google_cloud_video_livestream_v1_ListClipsRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.video.livestream.v1.ServiceProto
          .internal_static_google_cloud_video_livestream_v1_ListClipsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.video.livestream.v1.ListClipsRequest.class,
              com.google.cloud.video.livestream.v1.ListClipsRequest.Builder.class);
    }

    // Construct using com.google.cloud.video.livestream.v1.ListClipsRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its proto3 default and clears all "was set" bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      orderBy_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.video.livestream.v1.ServiceProto
          .internal_static_google_cloud_video_livestream_v1_ListClipsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.video.livestream.v1.ListClipsRequest getDefaultInstanceForType() {
      return com.google.cloud.video.livestream.v1.ListClipsRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.video.livestream.v1.ListClipsRequest build() {
      com.google.cloud.video.livestream.v1.ListClipsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.video.livestream.v1.ListClipsRequest buildPartial() {
      com.google.cloud.video.livestream.v1.ListClipsRequest result =
          new com.google.cloud.video.livestream.v1.ListClipsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose bit is set into the freshly built message;
    // unset fields keep the defaults established by the message's no-arg ctor.
    private void buildPartial0(com.google.cloud.video.livestream.v1.ListClipsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.orderBy_ = orderBy_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.video.livestream.v1.ListClipsRequest) {
        return mergeFrom((com.google.cloud.video.livestream.v1.ListClipsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge semantics: scalar/string fields overwrite only when the source
    // value is non-default (non-empty string / non-zero int).
    public Builder mergeFrom(com.google.cloud.video.livestream.v1.ListClipsRequest other) {
      if (other == com.google.cloud.video.livestream.v1.ListClipsRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      if (!other.getOrderBy().isEmpty()) {
        orderBy_ = other.orderBy_;
        bitField0_ |= 0x00000010;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop. Case labels are wire tags (fieldNumber << 3 | wireType):
    // 10 = field 1 len-delimited, 16 = field 2 varint, 26/34/42 = fields 3/4/5
    // len-delimited; tag 0 or an end-group tag terminates the loop.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            case 42:
              {
                orderBy_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    // Builder-side copy of each field; same lazy String/ByteString caching as the
    // message accessors.
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. Parent value for ListClipsRequest
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Parent value for ListClipsRequest
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Parent value for ListClipsRequest
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Parent value for ListClipsRequest
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Parent value for ListClipsRequest
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private int pageSize_;
    /**
     *
     *
     * <pre>
     * Requested page size. Server may return fewer items than requested.
     * If unspecified, server will pick an appropriate default.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * Requested page size. Server may return fewer items than requested.
     * If unspecified, server will pick an appropriate default.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Requested page size. Server may return fewer items than requested.
     * If unspecified, server will pick an appropriate default.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * Filtering results
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Filtering results
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Filtering results
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Filtering results
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Filtering results
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    private java.lang.Object orderBy_ = "";
    /**
     *
     *
     * <pre>
     * Hint for how to order the results
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return The orderBy.
     */
    public java.lang.String getOrderBy() {
      java.lang.Object ref = orderBy_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        orderBy_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Hint for how to order the results
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return The bytes for orderBy.
     */
    public com.google.protobuf.ByteString getOrderByBytes() {
      java.lang.Object ref = orderBy_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        orderBy_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Hint for how to order the results
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @param value The orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderBy(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      orderBy_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Hint for how to order the results
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOrderBy() {
      orderBy_ = getDefaultInstance().getOrderBy();
      bitField0_ = (bitField0_ & ~0x00000010);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Hint for how to order the results
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @param value The bytes for orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      orderBy_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.video.livestream.v1.ListClipsRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.video.livestream.v1.ListClipsRequest)
  // Shared immutable all-defaults singleton; also the factory root for builders.
  private static final com.google.cloud.video.livestream.v1.ListClipsRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.video.livestream.v1.ListClipsRequest();
  }

  public static com.google.cloud.video.livestream.v1.ListClipsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to Builder.mergeFrom and, on failure, attaches the
  // partially parsed message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ListClipsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListClipsRequest>() {
        @java.lang.Override
        public ListClipsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListClipsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListClipsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.video.livestream.v1.ListClipsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 38,250 | java-datacatalog/proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/UpdateTagTemplateRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/v1/datacatalog.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.v1;
/**
*
*
* <pre>
* Request message for
* [UpdateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplate].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.UpdateTagTemplateRequest}
*/
public final class UpdateTagTemplateRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1.UpdateTagTemplateRequest)
UpdateTagTemplateRequestOrBuilder {
  // NOTE(review): start of a second generated file (datacatalog
  // UpdateTagTemplateRequest) concatenated into this chunk; same protoc-generated
  // boilerplate shape as the message above. Comments are review annotations only.
  private static final long serialVersionUID = 0L;
  // Use UpdateTagTemplateRequest.newBuilder() to construct.
  private UpdateTagTemplateRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Both fields are message types, so the no-arg ctor has nothing to initialize
  // (they stay null until set; getters substitute the default instance).
  private UpdateTagTemplateRequest() {}
  // Protobuf-runtime instantiation hook; not for application code.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateTagTemplateRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datacatalog.v1.Datacatalog
        .internal_static_google_cloud_datacatalog_v1_UpdateTagTemplateRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.datacatalog.v1.Datacatalog
        .internal_static_google_cloud_datacatalog_v1_UpdateTagTemplateRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest.class,
            com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest.Builder.class);
  }
  // Explicit presence bits for the two message-typed fields:
  // bit 0 = tag_template, bit 1 = update_mask.
  private int bitField0_;

  public static final int TAG_TEMPLATE_FIELD_NUMBER = 1;
  private com.google.cloud.datacatalog.v1.TagTemplate tagTemplate_;
  /**
   *
   *
   * <pre>
   * Required. The template to update. The `name` field must be set.
   * </pre>
   *
   * <code>
   * .google.cloud.datacatalog.v1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the tagTemplate field is set.
   */
  @java.lang.Override
  public boolean hasTagTemplate() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The template to update. The `name` field must be set.
   * </pre>
   *
   * <code>
   * .google.cloud.datacatalog.v1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The tagTemplate.
   */
  // Never returns null: falls back to TagTemplate's default instance when unset.
  @java.lang.Override
  public com.google.cloud.datacatalog.v1.TagTemplate getTagTemplate() {
    return tagTemplate_ == null
        ? com.google.cloud.datacatalog.v1.TagTemplate.getDefaultInstance()
        : tagTemplate_;
  }
  /**
   *
   *
   * <pre>
   * Required. The template to update. The `name` field must be set.
   * </pre>
   *
   * <code>
   * .google.cloud.datacatalog.v1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.datacatalog.v1.TagTemplateOrBuilder getTagTemplateOrBuilder() {
    return tagTemplate_ == null
        ? com.google.cloud.datacatalog.v1.TagTemplate.getDefaultInstance()
        : tagTemplate_;
  }

  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * Names of fields whose values to overwrite on a tag template. Currently,
   * only `display_name` and `is_publicly_readable` can be overwritten.
   *
   * If this parameter is absent or empty, all modifiable fields
   * are overwritten. If such fields are non-required and omitted in the
   * request body, their values are emptied.
   *
   * Note: Updating the `is_publicly_readable` field may require up to 12
   * hours to take effect in search results.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2;</code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` and `is_publicly_readable` can be overwritten.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
*
* Note: Updating the `is_publicly_readable` field may require up to 12
* hours to take effect in search results.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` and `is_publicly_readable` can be overwritten.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
*
* Note: Updating the `is_publicly_readable` field may require up to 12
* hours to take effect in search results.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  // Serializes this message to the wire: each optional message field is
  // written only when its presence bit in bitField0_ is set, followed by
  // any unknown fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getTagTemplate()); // field 1: tag_template
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask()); // field 2: update_mask
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized byte size of this message; mirrors writeTo so the
  // same presence bits decide which fields contribute. The result is memoized
  // (messages are immutable, so the size never changes).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // cached from a previous call
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getTagTemplate());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: two requests are equal when each field has the same
  // presence on both sides and, where present, equal values, and their
  // unknown fields match.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest other =
        (com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest) obj;
    if (hasTagTemplate() != other.hasTagTemplate()) return false;
    if (hasTagTemplate()) {
      if (!getTagTemplate().equals(other.getTagTemplate())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash code consistent with equals(): mixes the descriptor, each present
  // field (keyed by its field number), and the unknown fields. Memoized
  // because messages are immutable; 0 is the "not yet computed" sentinel.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasTagTemplate()) {
      hash = (37 * hash) + TAG_TEMPLATE_FIELD_NUMBER;
      hash = (53 * hash) + getTagTemplate().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [UpdateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplate].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.v1.UpdateTagTemplateRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1.UpdateTagTemplateRequest)
com.google.cloud.datacatalog.v1.UpdateTagTemplateRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.v1.Datacatalog
.internal_static_google_cloud_datacatalog_v1_UpdateTagTemplateRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.v1.Datacatalog
.internal_static_google_cloud_datacatalog_v1_UpdateTagTemplateRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest.class,
com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest.Builder.class);
}
// Construct using com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getTagTemplateFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
    // Resets the builder to its default (empty) state: clears all presence
    // bits and releases any nested field builders so they are lazily
    // re-created on next use.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      tagTemplate_ = null;
      if (tagTemplateBuilder_ != null) {
        tagTemplateBuilder_.dispose();
        tagTemplateBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datacatalog.v1.Datacatalog
.internal_static_google_cloud_datacatalog_v1_UpdateTagTemplateRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest getDefaultInstanceForType() {
return com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest build() {
com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest buildPartial() {
com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest result =
new com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies the builder's set fields into the result message, translating
    // the builder's presence bits into the message's bitField0_.
    private void buildPartial0(com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        // prefer the nested builder's current state when one exists
        result.tagTemplate_ =
            tagTemplateBuilder_ == null ? tagTemplate_ : tagTemplateBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest) {
return mergeFrom((com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Field-wise merge from another message of the same type: present message
    // fields are recursively merged via the merge* helpers, and unknown
    // fields are concatenated.
    public Builder mergeFrom(com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest other) {
      if (other == com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest.getDefaultInstance())
        return this; // merging the default instance is a no-op
      if (other.hasTagTemplate()) {
        mergeTagTemplate(other.getTagTemplate());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Streaming parse: reads tag/value pairs until EOF (tag 0) or an
    // end-group tag, routing each known field into its nested builder and
    // preserving anything else as unknown fields. onChanged() is fired in
    // the finally block so partial progress is still reported on error.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: // field 1 (tag_template), wire type 2 (length-delimited)
              {
                input.readMessage(getTagTemplateFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18: // field 2 (update_mask), wire type 2 (length-delimited)
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private com.google.cloud.datacatalog.v1.TagTemplate tagTemplate_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.v1.TagTemplate,
com.google.cloud.datacatalog.v1.TagTemplate.Builder,
com.google.cloud.datacatalog.v1.TagTemplateOrBuilder>
tagTemplateBuilder_;
/**
*
*
* <pre>
* Required. The template to update. The `name` field must be set.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the tagTemplate field is set.
*/
public boolean hasTagTemplate() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The template to update. The `name` field must be set.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The tagTemplate.
*/
public com.google.cloud.datacatalog.v1.TagTemplate getTagTemplate() {
if (tagTemplateBuilder_ == null) {
return tagTemplate_ == null
? com.google.cloud.datacatalog.v1.TagTemplate.getDefaultInstance()
: tagTemplate_;
} else {
return tagTemplateBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The template to update. The `name` field must be set.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTagTemplate(com.google.cloud.datacatalog.v1.TagTemplate value) {
if (tagTemplateBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
tagTemplate_ = value;
} else {
tagTemplateBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The template to update. The `name` field must be set.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTagTemplate(
com.google.cloud.datacatalog.v1.TagTemplate.Builder builderForValue) {
if (tagTemplateBuilder_ == null) {
tagTemplate_ = builderForValue.build();
} else {
tagTemplateBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The template to update. The `name` field must be set.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeTagTemplate(com.google.cloud.datacatalog.v1.TagTemplate value) {
      if (tagTemplateBuilder_ == null) {
        // Merge into the existing value only when one was actually set and is
        // not the shared default instance; otherwise simply adopt `value`.
        if (((bitField0_ & 0x00000001) != 0)
            && tagTemplate_ != null
            && tagTemplate_ != com.google.cloud.datacatalog.v1.TagTemplate.getDefaultInstance()) {
          getTagTemplateBuilder().mergeFrom(value);
        } else {
          tagTemplate_ = value;
        }
      } else {
        tagTemplateBuilder_.mergeFrom(value);
      }
      // Only mark the field present (and notify listeners) when a value
      // actually ended up stored.
      if (tagTemplate_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The template to update. The `name` field must be set.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearTagTemplate() {
bitField0_ = (bitField0_ & ~0x00000001);
tagTemplate_ = null;
if (tagTemplateBuilder_ != null) {
tagTemplateBuilder_.dispose();
tagTemplateBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The template to update. The `name` field must be set.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.datacatalog.v1.TagTemplate.Builder getTagTemplateBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTagTemplateFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The template to update. The `name` field must be set.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.datacatalog.v1.TagTemplateOrBuilder getTagTemplateOrBuilder() {
if (tagTemplateBuilder_ != null) {
return tagTemplateBuilder_.getMessageOrBuilder();
} else {
return tagTemplate_ == null
? com.google.cloud.datacatalog.v1.TagTemplate.getDefaultInstance()
: tagTemplate_;
}
}
/**
*
*
* <pre>
* Required. The template to update. The `name` field must be set.
* </pre>
*
* <code>
* .google.cloud.datacatalog.v1.TagTemplate tag_template = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.v1.TagTemplate,
com.google.cloud.datacatalog.v1.TagTemplate.Builder,
com.google.cloud.datacatalog.v1.TagTemplateOrBuilder>
getTagTemplateFieldBuilder() {
if (tagTemplateBuilder_ == null) {
tagTemplateBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.v1.TagTemplate,
com.google.cloud.datacatalog.v1.TagTemplate.Builder,
com.google.cloud.datacatalog.v1.TagTemplateOrBuilder>(
getTagTemplate(), getParentForChildren(), isClean());
tagTemplate_ = null;
}
return tagTemplateBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` and `is_publicly_readable` can be overwritten.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
*
* Note: Updating the `is_publicly_readable` field may require up to 12
* hours to take effect in search results.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` and `is_publicly_readable` can be overwritten.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
*
* Note: Updating the `is_publicly_readable` field may require up to 12
* hours to take effect in search results.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` and `is_publicly_readable` can be overwritten.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
*
* Note: Updating the `is_publicly_readable` field may require up to 12
* hours to take effect in search results.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` and `is_publicly_readable` can be overwritten.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
*
* Note: Updating the `is_publicly_readable` field may require up to 12
* hours to take effect in search results.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` and `is_publicly_readable` can be overwritten.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
*
* Note: Updating the `is_publicly_readable` field may require up to 12
* hours to take effect in search results.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Merge into the existing mask only when one was actually set and is
        // not the shared default instance; otherwise simply adopt `value`.
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      // Only mark the field present (and notify listeners) when a value
      // actually ended up stored.
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` and `is_publicly_readable` can be overwritten.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
*
* Note: Updating the `is_publicly_readable` field may require up to 12
* hours to take effect in search results.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` and `is_publicly_readable` can be overwritten.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
*
* Note: Updating the `is_publicly_readable` field may require up to 12
* hours to take effect in search results.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` and `is_publicly_readable` can be overwritten.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
*
* Note: Updating the `is_publicly_readable` field may require up to 12
* hours to take effect in search results.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Names of fields whose values to overwrite on a tag template. Currently,
* only `display_name` and `is_publicly_readable` can be overwritten.
*
* If this parameter is absent or empty, all modifiable fields
* are overwritten. If such fields are non-required and omitted in the
* request body, their values are emptied.
*
* Note: Updating the `is_publicly_readable` field may require up to 12
* hours to take effect in search results.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1.UpdateTagTemplateRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1.UpdateTagTemplateRequest)
private static final com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest();
}
public static com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Parser used by the static parseFrom(...) overloads and getParserForType().
  // Delegates to a fresh Builder; on any failure the partially-built message
  // is attached to the thrown InvalidProtocolBufferException so callers can
  // inspect what was read before the error.
  private static final com.google.protobuf.Parser<UpdateTagTemplateRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateTagTemplateRequest>() {
        @java.lang.Override
        public UpdateTagTemplateRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // wrap plain I/O failures in the protobuf exception type
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<UpdateTagTemplateRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateTagTemplateRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright (c) 2000, 2025, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* https://oss.oracle.com/licenses/upl.
*/
package concurrent.locks;
import com.oracle.bedrock.junit.CoherenceClusterExtension;
import com.oracle.bedrock.runtime.coherence.CoherenceClusterMember;
import com.oracle.bedrock.runtime.concurrent.RemoteCallable;
import com.oracle.bedrock.runtime.concurrent.RemoteChannel;
import com.oracle.bedrock.runtime.concurrent.RemoteEvent;
import com.oracle.bedrock.runtime.concurrent.RemoteEventListener;
import com.oracle.bedrock.runtime.concurrent.callable.RemoteCallableStaticMethod;
import com.oracle.bedrock.testsupport.junit.AbstractTestLogs;
import com.oracle.coherence.common.base.Logger;
import com.oracle.coherence.concurrent.atomic.Atomics;
import com.oracle.coherence.concurrent.locks.Locks;
import com.oracle.coherence.concurrent.locks.RemoteReadWriteLock;
import com.tangosol.util.Base;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import java.io.Serializable;
import java.time.Duration;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.lessThan;
/**
* Test distributed read/write locks across multiple cluster members.
* <p>
* This class must be Serializable so that its methods can be used as
* remote callables by Bedrock.
*/
public abstract class AbstractClusteredRemoteReadWriteLockIT
implements Serializable
{
// ----- constructors ---------------------------------------------------
    /**
     * Create the test fixture.
     *
     * @param coherenceResource  the Bedrock extension that manages the test cluster
     */
    AbstractClusteredRemoteReadWriteLockIT(CoherenceClusterExtension coherenceResource)
        {
        m_coherenceResource = coherenceResource;
        }
@BeforeEach
void beforeEach(TestInfo info)
{
// print a message in the logs of all the cluster members that are still running
// to indicate the name of the test that is about to start
String sMessage = ">>>>> Starting test method " + info.getDisplayName();
logOnEachMember(sMessage);
}
@AfterEach
void after(TestInfo info)
{
// print a message in the logs of all the cluster members that are still running
// to indicate the name of the test that has just finished
String sMessage = "<<<<< Completed test method " + info.getDisplayName();
logOnEachMember(sMessage);
}
private void logOnEachMember(String sMessage)
{
m_coherenceResource.getCluster()
.forEach(m ->
{
try
{
m.invoke(() ->
{
Logger.info(sMessage);
return null;
});
}
catch (Throwable ignore)
{
// ignoring "RemoteChannel is closed" exception
// from members that were shut down
}
});
}
@Test
public void shouldAcquireAndReleaseLocksOnStorageMember()
{
// Get a storage member from the cluster
CoherenceClusterMember member = m_coherenceResource.getCluster().get("storage-1");
// If any assertions in the methods below fail this method will throw an exception
member.invoke(this::shouldAcquireAndReleaseWriteLock);
member.invoke(this::shouldAcquireAndReleaseReadLock);
}
@Test
public void shouldAcquireAndReleaseLocksOnStorageDisabledMember()
{
// Get a storage disabled application member from the cluster
CoherenceClusterMember member = m_coherenceResource.getCluster().get("application-1");
// If any assertions in the methods below fail this method will throw an exception
member.invoke(this::shouldAcquireAndReleaseWriteLock);
member.invoke(this::shouldAcquireAndReleaseReadLock);
}
/**
* This test method is invoked on remote processes by Bedrock.
*
* This method must have a return value as it is invoked as a
* RemoteCallable so that the invoke call blocks until the
* method has completes. In this case we do not care about the
* actual return value, so we use Void.
*
* If any of the assertions fail, the invoke call in the test will fail.
*
* @return always returns Void (null).
*/
Void shouldAcquireAndReleaseWriteLock()
{
Logger.info("In shouldAcquireAndReleaseWriteLock()");
RemoteReadWriteLock lock = Locks.remoteReadWriteLock("foo");
lock.writeLock().lock();
System.out.println("Write lock acquired by " + lock.getOwner());
assertThat(lock.isWriteLocked(), is(true));
assertThat(lock.isWriteLockedByCurrentThread(), is(true));
assertThat(lock.getWriteHoldCount(), is(1));
lock.writeLock().unlock();
assertThat(lock.isWriteLocked(), is(false));
assertThat(lock.isWriteLockedByCurrentThread(), is(false));
assertThat(lock.getWriteHoldCount(), is(0));
System.out.println("Write lock released by " + Thread.currentThread());
return null;
}
Void shouldAcquireAndReleaseReadLock()
{
Logger.info("In shouldAcquireAndReleaseReadLock()");
RemoteReadWriteLock lock = Locks.remoteReadWriteLock("foo");
lock.readLock().lock();
System.out.println("Read lock acquired by " + Thread.currentThread());
assertThat(lock.isReadLocked(), is(true));
assertThat(lock.getReadLockCount(), is(1));
assertThat(lock.getReadHoldCount(), is(1));
lock.readLock().unlock();
assertThat(lock.isReadLocked(), is(false));
assertThat(lock.getReadLockCount(), is(0));
assertThat(lock.getReadHoldCount(), is(0));
System.out.println("Read lock released by " + Thread.currentThread());
return null;
}
@Test
void shouldTimeOutIfWriteLockIsHeldByAnotherMemberUsingStorageMembers() throws Exception
{
// Get storage members from the cluster
CoherenceClusterMember member1 = m_coherenceResource.getCluster().get("storage-1");
CoherenceClusterMember member2 = m_coherenceResource.getCluster().get("storage-2");
shouldTimeOutIfWriteLockIsHeldByAnotherMember(member1, member2);
}
@Test
void shouldTimeOutIfWriteLockIsHeldByAnotherMemberUsingStorageDisabledMembers() throws Exception
{
// Get storage disabled application members from the cluster
CoherenceClusterMember member1 = m_coherenceResource.getCluster().get("application-1");
CoherenceClusterMember member2 = m_coherenceResource.getCluster().get("application-2");
shouldTimeOutIfWriteLockIsHeldByAnotherMember(member1, member2);
}
    /**
     * This test acquires a write lock on one cluster member for a specific duration and then tries to acquire
     * the same lock on another member.
     * <p>
     * While the write lock is held by member1, both read and write tryLock
     * attempts on member2 must time out; once member1 releases, both must
     * succeed.
     *
     * @param member1 the member to acquire the lock on
     * @param member2 the member to try to acquire the lock on
     *
     * @throws Exception if the test fails
     */
    void shouldTimeOutIfWriteLockIsHeldByAnotherMember(CoherenceClusterMember member1, CoherenceClusterMember member2) throws Exception
        {
        String sLockName = "foo";
        LockEventListener listener1 = new LockEventListener(sLockName);
        LockEventListener listener2 = new LockEventListener(sLockName);
        // Add the listeners to listen for lock events
        member1.addListener(listener1);
        member2.addListener(listener2);
        // Acquire the lock on first member (the lock will be held for 5 seconds)
        member1.submit(new AcquireWriteLock(sLockName, Duration.ofSeconds(5)));
        // wait for the lock acquired event
        listener1.awaitWriteAcquired(Duration.ofMinutes(1));
        // try to acquire read lock on the second member (should time out after 500 millis,
        // because the exclusive write lock is still held by member1)
        TryReadLock tryReadLock = new TryReadLock(sLockName, Duration.ofMillis(500));
        CompletableFuture<Boolean> futureTryRead = member2.submit(tryReadLock);
        assertThat(futureTryRead.get(), is(false));
        // try to acquire write lock on the second member (should time out after 500 millis)
        TryWriteLock tryWriteLock = new TryWriteLock(sLockName, Duration.ofMillis(500));
        CompletableFuture<Boolean> futureTryWrite = member2.submit(tryWriteLock);
        assertThat(futureTryWrite.get(), is(false));
        // wait for the write lock released event from the first member
        listener1.awaitWriteReleased(Duration.ofMinutes(1));
        // try again to acquire the write lock on the second member (should succeed)
        futureTryWrite = member2.submit(tryWriteLock);
        assertThat(futureTryWrite.get(), is(true));
        // wait for the write lock acquired and released event from the second member
        listener2.awaitWriteAcquired(Duration.ofMinutes(1));
        listener2.awaitWriteReleased(Duration.ofMinutes(1));
        // try again to acquire the read lock on the second member (should succeed)
        futureTryRead = member2.submit(tryReadLock);
        assertThat(futureTryRead.get(), is(true));
        // wait for the read lock acquired and released event from the second member
        listener2.awaitReadAcquired(Duration.ofMinutes(1));
        listener2.awaitReadReleased(Duration.ofMinutes(1));
        }
@Test
void shouldAcquireReadLockFromMultipleStorageMembers() throws Exception
{
// Get storage members from the cluster
CoherenceClusterMember member1 = m_coherenceResource.getCluster().get("storage-1");
CoherenceClusterMember member2 = m_coherenceResource.getCluster().get("storage-2");
shouldAcquireReadLockFromMultipleMembers(member1, member2);
}
@Test
void shouldAcquireReadLockFromMultipleStorageDisabledMembers() throws Exception
{
// Get storage disabled application members from the cluster
CoherenceClusterMember member1 = m_coherenceResource.getCluster().get("application-1");
CoherenceClusterMember member2 = m_coherenceResource.getCluster().get("application-2");
shouldAcquireReadLockFromMultipleMembers(member1, member2);
}
@Test
void shouldAcquireReadLockFromAllMembers() throws Exception
{
// Get all members from the cluster
CoherenceClusterMember member1 = m_coherenceResource.getCluster().get("storage-1");
CoherenceClusterMember member2 = m_coherenceResource.getCluster().get("storage-2");
CoherenceClusterMember member3 = m_coherenceResource.getCluster().get("application-1");
CoherenceClusterMember member4 = m_coherenceResource.getCluster().get("application-2");
shouldAcquireReadLockFromMultipleMembers(member1, member2, member3, member4);
}
    /**
     * This test acquires a read lock on each specified cluster member for a specific duration,
     * verifying that the shared read lock can be held by all members concurrently.
     *
     * @param aMembers the members to acquire the read lock on
     *
     * @throws Exception if the test fails
     */
    void shouldAcquireReadLockFromMultipleMembers(CoherenceClusterMember... aMembers) throws Exception
        {
        String sLockName = "foo";
        Set<CoherenceClusterMember> members = Set.of(aMembers);
        // Add a listener to listen for lock events from each member
        Set<LockEventListener> listeners = new HashSet<>();
        for (CoherenceClusterMember member : members)
            {
            LockEventListener listener = new LockEventListener(sLockName);
            member.addListener(listener);
            listeners.add(listener);
            }
        // Acquire the read lock on each member (the lock will be held for 5 seconds)
        members.forEach(member -> member.submit(new AcquireReadLock(sLockName, Duration.ofSeconds(5))));
        // wait for the lock acquired event from each member
        for (LockEventListener listener : listeners)
            {
            listener.awaitReadAcquired(Duration.ofMinutes(1));
            }
        // wait for the lock released event from each member
        for (LockEventListener listener : listeners)
            {
            listener.awaitReadReleased(Duration.ofMinutes(1));
            }
        }
@Test
void shouldAcquireAndReleaseLockInOrderFromMultipleStorageMembers() throws Exception
{
// Get storage members from the cluster
CoherenceClusterMember member1 = m_coherenceResource.getCluster().get("storage-1");
CoherenceClusterMember member2 = m_coherenceResource.getCluster().get("storage-2");
shouldAcquireAndReleaseLockInOrderFromMultipleMembers(member1, member2);
}
    @Test
    void shouldAcquireAndReleaseLockInOrderFromMultipleStorageDisabledMembers() throws Exception
        {
        // Get storage disabled application members from the cluster
        CoherenceClusterMember member1 = m_coherenceResource.getCluster().get("application-1");
        CoherenceClusterMember member2 = m_coherenceResource.getCluster().get("application-2");
        shouldAcquireAndReleaseLockInOrderFromMultipleMembers(member1, member2);
        }
    /**
     * This test acquires the same lock from multiple members.
     * The first member should acquire the lock and the second member should block until the
     * first has released the lock.
     * <p>
     * The final assertions rely on the global event order produced by
     * {@link LockEvent}: member1 must acquire before it releases, and must
     * release before member2 acquires.
     *
     * @param member1 the first member to acquire the lock
     * @param member2 the second member to acquire the lock
     *
     * @throws Exception if the test fails
     */
    void shouldAcquireAndReleaseLockInOrderFromMultipleMembers(CoherenceClusterMember member1, CoherenceClusterMember member2) throws Exception
        {
        String sLockName = "foo";
        LockEventListener listener1 = new LockEventListener(sLockName);
        LockEventListener listener2 = new LockEventListener(sLockName);
        // Add the listeners to listen for lock events
        member1.addListener(listener1);
        member2.addListener(listener2);
        // Acquire the lock on first member (the lock will be held for 2 seconds)
        member1.submit(new AcquireWriteLock(sLockName, Duration.ofSeconds(2)));
        // wait for the lock acquired event
        listener1.awaitWriteAcquired(Duration.ofMinutes(1));
        // Try to acquire the lock on second member (should fail, zero timeout)
        assertThat(member2.invoke(new TryWriteLock(sLockName)), is(false));
        // Acquire the lock on the second member, should block until the first member releases
        member2.submit(new AcquireWriteLock(sLockName, Duration.ofSeconds(1)));
        // wait for the second member to acquire the lock (should be after member 1 releases the lock)
        listener2.awaitWriteAcquired(Duration.ofMinutes(1));
        // wait for the second member to release the lock
        listener2.awaitWriteReleased(Duration.ofMinutes(1));
        // Assert the locks were acquired and released in the order expected
        System.out.println("Acquired #1: " + listener1.getWriteAcquiredOrder());
        System.out.println("Released #1: " + listener1.getWriteReleasedOrder());
        System.out.println("Acquired #2: " + listener2.getWriteAcquiredOrder());
        System.out.println("Released #2: " + listener2.getWriteReleasedOrder());
        assertThat(listener1.getWriteAcquiredOrder(), lessThan(listener1.getWriteReleasedOrder()));
        assertThat(listener1.getWriteReleasedOrder(), lessThan(listener2.getWriteAcquiredOrder()));
        assertThat(listener2.getWriteAcquiredOrder(), lessThan(listener2.getWriteReleasedOrder()));
        }
@Test
void shouldAcquireLockHeldByFailedStorageMember() throws Exception
{
// Get storage members from the cluster
CoherenceClusterMember member1 = m_coherenceResource.getCluster().get("storage-3");
CoherenceClusterMember member2 = m_coherenceResource.getCluster().get("storage-2");
shouldAcquireLockHeldByFailedMember(member1, member2);
}
@Test
void shouldAcquireLockHeldByFailedStorageDisabledMember() throws Exception
{
// Get storage disabled application members from the cluster
CoherenceClusterMember member1 = m_coherenceResource.getCluster().get("application-3");
CoherenceClusterMember member2 = m_coherenceResource.getCluster().get("application-2");
shouldAcquireLockHeldByFailedMember(member1, member2);
}
    /**
     * This test checks that a lock held by a failed member is automatically released,
     * and subsequently acquired by another member.
     * <p>
     * member1 takes a write lock on "foo" and a read lock on "bar" for one
     * minute each; killing member1 must release both early so member2's
     * pending write lock acquisitions can complete well within the timeout.
     *
     * @param member1 the first member to acquire the lock on and then kill
     * @param member2 the second member to try to acquire the lock on
     *
     * @throws Exception if the test fails
     */
    void shouldAcquireLockHeldByFailedMember(CoherenceClusterMember member1, CoherenceClusterMember member2) throws Exception
        {
        LockEventListener foo1 = new LockEventListener("foo");
        LockEventListener foo2 = new LockEventListener("foo");
        LockEventListener bar1 = new LockEventListener("bar");
        LockEventListener bar2 = new LockEventListener("bar");
        // Add the listeners to listen for lock events from the first member.
        member1.addListener(foo1);
        member1.addListener(bar1);
        // Add the listeners to listen for lock events from the second member.
        member2.addListener(foo2);
        member2.addListener(bar2);
        // NOTE(review): presumably this pause lets the listener registrations
        // settle before any events are raised - confirm whether it is needed
        Base.sleep(1000);
        // Acquire read and write lock on first member (the lock will be held for 1 minute,
        // but should be released as soon as the member is killed)
        member1.submit(new AcquireWriteLock("foo", Duration.ofMinutes(1)));
        member1.submit(new AcquireReadLock("bar", Duration.ofMinutes(1)));
        // wait for write and read lock acquired event
        foo1.awaitWriteAcquired(Duration.ofMinutes(1));
        bar1.awaitReadAcquired(Duration.ofMinutes(1));
        // Acquire write locks on second member; these block until member1's locks go away
        member2.submit(new AcquireWriteLock("foo", Duration.ofSeconds(5)));
        member2.submit(new AcquireWriteLock("bar", Duration.ofSeconds(5)));
        // Kill first member
        member1.close();
        // wait for the lock acquired and released events from the second member
        foo2.awaitWriteAcquired(Duration.ofMinutes(1));
        bar2.awaitWriteAcquired(Duration.ofMinutes(1));
        foo2.awaitWriteReleased(Duration.ofMinutes(1));
        bar2.awaitWriteReleased(Duration.ofMinutes(1));
        }
// ----- inner class: TryWriteLock --------------------------------------
/**
* A Bedrock remote callable that tries to acquire a lock within a given timeout.
* <p>
* The result of the call to {@link RemoteReadWriteLock.WriteLock#tryLock()} is returned.
* If the lock was acquired it is immediately released.
*/
static class TryWriteLock
implements RemoteCallable<Boolean>
{
/**
* A remote channel injected by Bedrock and used to fire events back to the test.
*/
@RemoteChannel.Inject
private RemoteChannel remoteChannel;
/**
* The name of the lock to acquire.
*/
private final String f_sLockName;
/**
* The amount of time to wait to acquire the lock.
*/
private final Duration f_timeout;
/**
* Create a {@link TryWriteLock} callable.
*
* @param sLockName the name of the lock to acquire
*/
public TryWriteLock(String sLockName)
{
f_sLockName = sLockName;
f_timeout = Duration.ZERO;
}
/**
* Create a {@link TryWriteLock} callable.
*
* @param sLockName the name of the lock to acquire
* @param duration the amount of time to wait to acquire the lock
*/
public TryWriteLock(String sLockName, Duration duration)
{
f_sLockName = sLockName;
f_timeout = duration;
}
@Override
public Boolean call() throws Exception
{
RemoteReadWriteLock lock = Locks.remoteReadWriteLock(f_sLockName);
boolean fAcquired;
if (f_timeout.isZero())
{
Logger.info("Trying to acquire write lock " + f_sLockName + " with zero timeout");
fAcquired = lock.writeLock().tryLock();
}
else
{
Logger.info("Trying to acquire write lock " + f_sLockName + " with timeout of " + f_timeout);
fAcquired = lock.writeLock().tryLock(f_timeout.toMillis(), TimeUnit.MILLISECONDS);
}
if (fAcquired)
{
remoteChannel.raise(new LockEvent(f_sLockName, LockEventType.WriteAcquired));
Logger.info("Tried and succeeded to acquire write lock " + f_sLockName + " within timeout " + f_timeout);
remoteChannel.raise(new LockEvent(f_sLockName, LockEventType.WriteReleased));
lock.writeLock().unlock();
}
else
{
Logger.info("Tried and failed to acquire write lock " + f_sLockName + " within timeout " + f_timeout);
}
return fAcquired;
}
}
// ----- inner class: TryReadLock ---------------------------------------
/**
* A Bedrock remote callable that tries to acquire a read lock within a given timeout.
* <p>
* The result of the call to {@link RemoteReadWriteLock.ReadLock#tryLock()} is returned.
* If the lock was acquired it is immediately released.
*/
static class TryReadLock
implements RemoteCallable<Boolean>
{
/**
* A remote channel injected by Bedrock and used to fire events back to the test.
*/
@RemoteChannel.Inject
private RemoteChannel remoteChannel;
/**
* The name of the lock to acquire.
*/
private final String f_sLockName;
/**
* The amount of time to wait to acquire the lock.
*/
private final Duration f_timeout;
/**
* Create a {@link TryReadLock} callable.
*
* @param sLockName the name of the lock to acquire
*/
public TryReadLock(String sLockName)
{
f_sLockName = sLockName;
f_timeout = Duration.ZERO;
}
/**
* Create a {@link TryReadLock} callable.
*
* @param sLockName the name of the lock to acquire
* @param duration the amount of time to wait to acquire the lock
*/
public TryReadLock(String sLockName, Duration duration)
{
f_sLockName = sLockName;
f_timeout = duration;
}
@Override
public Boolean call() throws Exception
{
RemoteReadWriteLock lock = Locks.remoteReadWriteLock(f_sLockName);
boolean fAcquired;
if (f_timeout.isZero())
{
Logger.info("Trying to acquire read lock " + f_sLockName + " with zero timeout");
fAcquired = lock.readLock().tryLock();
}
else
{
Logger.info("Trying to acquire read lock " + f_sLockName + " with timeout of " + f_timeout);
fAcquired = lock.readLock().tryLock(f_timeout.toMillis(), TimeUnit.MILLISECONDS);
}
if (fAcquired)
{
remoteChannel.raise(new LockEvent(f_sLockName, LockEventType.ReadAcquired));
Logger.info("Tried and succeeded to acquire read lock " + f_sLockName + " within timeout " + f_timeout);
remoteChannel.raise(new LockEvent(f_sLockName, LockEventType.ReadReleased));
lock.readLock().unlock();
}
else
{
Logger.info("Tried and failed to acquire read lock " + f_sLockName + " within timeout " + f_timeout);
}
return fAcquired;
}
}
// ----- inner class: AcquireWriteLock ----------------------------------
/**
* A Bedrock remote callable that acquires a lock for a specific amount of time.
* <p>
* This callable fires remote events to indicate when the lock was acquired and released.
*/
static class AcquireWriteLock
implements RemoteCallable<Void>
{
/**
* A remote channel injected by Bedrock and used to fire events back to the test.
*/
@RemoteChannel.Inject
private RemoteChannel remoteChannel;
/**
* The name of the lock to acquire.
*/
private final String f_sLockName;
/**
* The duration to hold the lock for.
*/
private final Duration f_duration;
/**
* Create an {@link AcquireWriteLock} callable.
*
* @param sLockName the name of the lock to acquire
* @param duration the duration to hold the lock for
*/
AcquireWriteLock(String sLockName, Duration duration)
{
f_sLockName = sLockName;
f_duration = duration;
}
@Override
public Void call()
{
Logger.info("Acquiring write lock " + f_sLockName);
RemoteReadWriteLock lock = Locks.remoteReadWriteLock(f_sLockName);
lock.writeLock().lock();
try
{
remoteChannel.raise(new LockEvent(f_sLockName, LockEventType.WriteAcquired));
Logger.info("Write lock " + f_sLockName + " acquired by " + lock.getOwner());
Thread.sleep(f_duration.toMillis());
}
catch (InterruptedException ignore)
{
}
finally
{
remoteChannel.raise(new LockEvent(f_sLockName, LockEventType.WriteReleased));
lock.writeLock().unlock();
Logger.info("Write lock " + f_sLockName + " released by " + Thread.currentThread());
}
return null;
}
}
// ----- inner class: AcquireReadLock ----------------------------------
/**
* A Bedrock remote callable that acquires a lock for a specific amount of time.
* <p>
* This callable fires remote events to indicate when the lock was acquired and released.
*/
static class AcquireReadLock
implements RemoteCallable<Void>
{
/**
* A remote channel injected by Bedrock and used to fire events back to the test.
*/
@RemoteChannel.Inject
private RemoteChannel remoteChannel;
/**
* The name of the lock to acquire.
*/
private final String f_sLockName;
/**
* The duration to hold the lock for.
*/
private final Duration f_duration;
/**
* Create an {@link AcquireWriteLock} callable.
*
* @param sLockName the name of the lock to acquire
* @param duration the duration to hold the lock for
*/
AcquireReadLock(String sLockName, Duration duration)
{
f_sLockName = sLockName;
f_duration = duration;
}
@Override
public Void call()
{
Logger.info("Acquiring read lock " + f_sLockName);
RemoteReadWriteLock lock = Locks.remoteReadWriteLock(f_sLockName);
lock.readLock().lock();
try
{
remoteChannel.raise(new LockEvent(f_sLockName, LockEventType.ReadAcquired));
Logger.info("Read lock " + f_sLockName + " acquired by " + Thread.currentThread());
Thread.sleep(f_duration.toMillis());
}
catch (InterruptedException ignore)
{
}
finally
{
remoteChannel.raise(new LockEvent(f_sLockName, LockEventType.ReadReleased));
lock.readLock().unlock();
Logger.info("Read lock " + f_sLockName + " released by " + Thread.currentThread());
}
return null;
}
}
    // ----- inner class: LockEvent -----------------------------------------

    /**
     * A Bedrock remote event raised by the lock callables in this test
     * ({@link AcquireWriteLock}, {@link AcquireReadLock}, {@link TryWriteLock}
     * and {@link TryReadLock}) to notify the calling test when a lock has
     * been acquired and released.
     * <p>
     * Each event captures a cluster-wide sequence number taken from a remote
     * atomic counter, so tests can assert the relative order of lock
     * transitions across different members.
     */
    static class LockEvent
            implements RemoteEvent
        {
        /**
         * The name of the lock.
         */
        private final String f_sLockName;

        /**
         * The type of the event.
         */
        private final LockEventType f_type;

        /**
         * The global order of the event, assigned at construction time from
         * a shared remote atomic counter.
         */
        private final int f_order;

        /**
         * Create a lock event.
         *
         * @param sLockName the name of the lock
         * @param type      the type of the event
         */
        public LockEvent(String sLockName, LockEventType type)
            {
            f_sLockName = sLockName;
            f_type      = type;
            f_order     = Atomics.remoteAtomicInteger("ClusteredDistributedReadWriteLockIT.eventCounter").incrementAndGet();
            }

        /**
         * Returns the name of the lock.
         *
         * @return the name of the lock
         */
        public String getLockName()
            {
            return f_sLockName;
            }

        /**
         * Returns the event type.
         *
         * @return the event type
         */
        public LockEventType getEventType()
            {
            return f_type;
            }

        /**
         * Return the global event order.
         *
         * @return the global event order
         */
        public int getOrder()
            {
            return f_order;
            }
        }
// ----- inner class LockEventListener ----------------------------------
/**
* A {@link RemoteEventListener} that listens for {@link LockEvent lock events}.
*/
static class LockEventListener
implements RemoteEventListener
{
/**
* The name of the lock.
*/
private final String f_sLockName;
/**
* A future that completes when the read lock acquired event is received.
*/
private final CompletableFuture<Integer> f_futureReadAcquired = new CompletableFuture<>();
/**
* A future that completes when the read lock released event is received.
*/
private final CompletableFuture<Integer> f_futureReadReleased = new CompletableFuture<>();
/**
* A future that completes when the write lock acquired event is received.
*/
private final CompletableFuture<Integer> f_futureWriteAcquired = new CompletableFuture<>();
/**
* A future that completes when the write lock released event is received.
*/
private final CompletableFuture<Integer> f_futureWriteReleased = new CompletableFuture<>();
/**
* Create a {@link LockEventListener}.
*
* @param sLockName the name of the lock
*/
public LockEventListener(String sLockName)
{
f_sLockName = sLockName;
}
@Override
public void onEvent(RemoteEvent event)
{
if (event instanceof LockEvent)
{
LockEvent e = (LockEvent) event;
if (f_sLockName.equals(e.getLockName()))
{
switch (e.getEventType())
{
case ReadAcquired:
f_futureReadAcquired.complete(e.getOrder());
break;
case ReadReleased:
f_futureReadReleased.complete(e.getOrder());
break;
case WriteAcquired:
f_futureWriteAcquired.complete(e.getOrder());
break;
case WriteReleased:
f_futureWriteReleased.complete(e.getOrder());
break;
}
}
}
}
/**
* Wait for the read lock acquired event.
*
* @param timeout the maximum amount of time to wait
*
* @return the global order of the read acquired event
*/
public int awaitReadAcquired(Duration timeout) throws Exception
{
return f_futureReadAcquired.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
}
/**
* Wait for the write lock acquired event.
*
* @param timeout the maximum amount of time to wait
*
* @return the global order of the write acquired event
*/
public int awaitWriteAcquired(Duration timeout) throws Exception
{
return f_futureWriteAcquired.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
}
/**
* Returns true if the read lock has been acquired.
*
* @return true if the read lock has been acquired
*/
public boolean isReadAcquired()
{
return f_futureReadAcquired.isDone();
}
/**
* Returns true if the write lock has been acquired.
*
* @return true if the write lock has been acquired
*/
public boolean isWriteAcquired()
{
return f_futureWriteAcquired.isDone();
}
/**
* Returns the global order of the read lock acquired event.
*
* @return the global order of the read lock acquired event
*/
public int getReadAcquiredOrder()
{
return f_futureReadAcquired.join();
}
/**
* Returns the global order of the write lock acquired event.
*
* @return the global order of the write lock acquired event
*/
public int getWriteAcquiredOrder()
{
return f_futureWriteAcquired.join();
}
/**
* Wait for the read lock released event.
*
* @param timeout the maximum amount of time to wait
*
* @return the global order of the read released event
*/
public int awaitReadReleased(Duration timeout) throws Exception
{
return f_futureReadReleased.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
}
/**
* Wait for the write lock released event.
*
* @param timeout the maximum amount of time to wait
*
* @return the global order of the write released event
*/
public int awaitWriteReleased(Duration timeout) throws Exception
{
return f_futureWriteReleased.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
}
/**
* Returns true if the read lock has been acquired and released.
*
* @return true if the read lock has been acquired and released
*/
public boolean isReadReleased()
{
return f_futureReadAcquired.isDone() && f_futureReadReleased.isDone();
}
/**
* Returns true if the write lock has been acquired and released.
*
* @return true if the write lock has been acquired and released
*/
public boolean isWriteReleased()
{
return f_futureWriteAcquired.isDone() && f_futureWriteReleased.isDone();
}
/**
* Returns the global order of the read lock released event.
*
* @return the global order of the read lock released event
*/
public int getReadReleasedOrder()
{
return f_futureReadReleased.join();
}
/**
* Returns the global order of the write lock released event.
*
* @return the global order of the write lock released event
*/
public int getWriteReleasedOrder()
{
return f_futureWriteReleased.join();
}
}
// ----- inner enum LockEventType ---------------------------------------
/**
* An enum of lock event types.
*/
enum LockEventType
{
ReadAcquired,
ReadReleased,
WriteAcquired,
WriteReleased
}
    // ----- data members ---------------------------------------------------

    /**
     * A Bedrock JUnit5 extension with a Coherence cluster for the tests.
     * <p>
     * NOTE(review): static, but assigned by the instance constructor; all
     * concrete subclasses therefore share whichever extension was passed to
     * the most recently constructed instance - confirm this is intended.
     */
    static CoherenceClusterExtension m_coherenceResource;
    /**
     * This is a work-around to fix the fact that the JUnit5 test logs extension
     * in Bedrock does not work for BeforeAll methods and extensions.
     */
    static class TestLogs
            extends AbstractTestLogs
        {
        /**
         * Create a {@link TestLogs} initialised for the given test class,
         * labelling captured output with the "BeforeAll" method name.
         *
         * @param testClass  the test class the logs belong to
         */
        public TestLogs(Class<?> testClass)
            {
            init(testClass, "BeforeAll");
            }
        }
}
|
googleapis/google-cloud-java | 38,209 | java-billing/proto-google-cloud-billing-v1/src/main/java/com/google/cloud/billing/v1/ProjectBillingInfo.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/billing/v1/cloud_billing.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.billing.v1;
/**
*
*
* <pre>
* Encapsulation of billing information for a Google Cloud Console project. A
* project has at most one associated billing account at a time (but a billing
* account can be assigned to multiple projects).
* </pre>
*
* Protobuf type {@code google.cloud.billing.v1.ProjectBillingInfo}
*/
public final class ProjectBillingInfo extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.billing.v1.ProjectBillingInfo)
ProjectBillingInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use ProjectBillingInfo.newBuilder() to construct.
private ProjectBillingInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ProjectBillingInfo() {
name_ = "";
projectId_ = "";
billingAccountName_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ProjectBillingInfo();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.billing.v1.CloudBillingProto
.internal_static_google_cloud_billing_v1_ProjectBillingInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.billing.v1.CloudBillingProto
.internal_static_google_cloud_billing_v1_ProjectBillingInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.billing.v1.ProjectBillingInfo.class,
com.google.cloud.billing.v1.ProjectBillingInfo.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Output only. The resource name for the `ProjectBillingInfo`; has the form
* `projects/{project_id}/billingInfo`. For example, the resource name for the
* billing information for project `tokyo-rain-123` would be
* `projects/tokyo-rain-123/billingInfo`.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. The resource name for the `ProjectBillingInfo`; has the form
* `projects/{project_id}/billingInfo`. For example, the resource name for the
* billing information for project `tokyo-rain-123` would be
* `projects/tokyo-rain-123/billingInfo`.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  public static final int PROJECT_ID_FIELD_NUMBER = 2;

  // String/ByteString dual-form cache; see name_ for the pattern.
  @SuppressWarnings("serial")
  private volatile java.lang.Object projectId_ = "";

  /**
   *
   *
   * <pre>
   * Output only. The ID of the project that this `ProjectBillingInfo`
   * represents, such as `tokyo-rain-123`. This is a convenience field so that
   * you don't need to parse the `name` field to obtain a project ID.
   * </pre>
   *
   * <code>string project_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The projectId.
   */
  @java.lang.Override
  public java.lang.String getProjectId() {
    java.lang.Object ref = projectId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once, cache the String form for subsequent calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      projectId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Output only. The ID of the project that this `ProjectBillingInfo`
   * represents, such as `tokyo-rain-123`. This is a convenience field so that
   * you don't need to parse the `name` field to obtain a project ID.
   * </pre>
   *
   * <code>string project_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The bytes for projectId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getProjectIdBytes() {
    java.lang.Object ref = projectId_;
    if (ref instanceof java.lang.String) {
      // Encode once, cache the ByteString form for subsequent calls.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      projectId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int BILLING_ACCOUNT_NAME_FIELD_NUMBER = 3;

  // String/ByteString dual-form cache; see name_ for the pattern.
  @SuppressWarnings("serial")
  private volatile java.lang.Object billingAccountName_ = "";

  /**
   *
   *
   * <pre>
   * The resource name of the billing account associated with the project, if
   * any. For example, `billingAccounts/012345-567890-ABCDEF`.
   * </pre>
   *
   * <code>string billing_account_name = 3;</code>
   *
   * @return The billingAccountName.
   */
  @java.lang.Override
  public java.lang.String getBillingAccountName() {
    java.lang.Object ref = billingAccountName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once, cache the String form for subsequent calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      billingAccountName_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The resource name of the billing account associated with the project, if
   * any. For example, `billingAccounts/012345-567890-ABCDEF`.
   * </pre>
   *
   * <code>string billing_account_name = 3;</code>
   *
   * @return The bytes for billingAccountName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getBillingAccountNameBytes() {
    java.lang.Object ref = billingAccountName_;
    if (ref instanceof java.lang.String) {
      // Encode once, cache the ByteString form for subsequent calls.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      billingAccountName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int BILLING_ENABLED_FIELD_NUMBER = 4;

  // Proto3 scalar bool; false is the (unserialized) default.
  private boolean billingEnabled_ = false;

  /**
   *
   *
   * <pre>
   * Output only. True if the project is associated with an open billing
   * account, to which usage on the project is charged. False if the project is
   * associated with a closed billing account, or no billing account at all, and
   * therefore cannot use paid services.
   * </pre>
   *
   * <code>bool billing_enabled = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The billingEnabled.
   */
  @java.lang.Override
  public boolean getBillingEnabled() {
    return billingEnabled_;
  }
  // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  /**
   * Always true: this message has no required fields, so any instance is
   * considered initialized. The result is memoized per generated convention.
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to {@code output}, writing only fields that differ
   * from their proto3 defaults (empty string / false), followed by any unknown
   * fields preserved from parsing.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, projectId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(billingAccountName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, billingAccountName_);
    }
    if (billingEnabled_ != false) {
      output.writeBool(4, billingEnabled_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes (and memoizes in {@code memoizedSize}) the wire size of this
   * message. Mirrors writeTo(): default-valued fields contribute nothing.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, projectId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(billingAccountName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, billingAccountName_);
    }
    if (billingEnabled_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, billingEnabled_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Field-by-field equality over all four declared fields plus the preserved
   * unknown fields; non-ProjectBillingInfo arguments fall back to
   * {@code super.equals}.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.billing.v1.ProjectBillingInfo)) {
      return super.equals(obj);
    }
    com.google.cloud.billing.v1.ProjectBillingInfo other =
        (com.google.cloud.billing.v1.ProjectBillingInfo) obj;

    if (!getName().equals(other.getName())) return false;
    if (!getProjectId().equals(other.getProjectId())) return false;
    if (!getBillingAccountName().equals(other.getBillingAccountName())) return false;
    if (getBillingEnabled() != other.getBillingEnabled()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash code consistent with equals(): folds the descriptor, each field
   * (tagged with its field number), and the unknown fields. Memoized in
   * {@code memoizedHashCode}.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER;
    hash = (53 * hash) + getProjectId().hashCode();
    hash = (37 * hash) + BILLING_ACCOUNT_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getBillingAccountName().hashCode();
    hash = (37 * hash) + BILLING_ENABLED_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getBillingEnabled());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Standard generated parse entry points -------------------------------
  // All overloads delegate to the shared PARSER (directly for in-memory
  // sources, or via the GeneratedMessageV3 IO helpers for streams).
  public static com.google.cloud.billing.v1.ProjectBillingInfo parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.billing.v1.ProjectBillingInfo parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.billing.v1.ProjectBillingInfo parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.billing.v1.ProjectBillingInfo parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.billing.v1.ProjectBillingInfo parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.billing.v1.ProjectBillingInfo parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.billing.v1.ProjectBillingInfo parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.billing.v1.ProjectBillingInfo parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.billing.v1.ProjectBillingInfo parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.billing.v1.ProjectBillingInfo parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.billing.v1.ProjectBillingInfo parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.billing.v1.ProjectBillingInfo parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // --- Builder factory methods ---------------------------------------------
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Returns a fresh builder seeded from the default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a builder pre-populated with the fields of {@code prototype}. */
  public static Builder newBuilder(com.google.cloud.billing.v1.ProjectBillingInfo prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder without a mergeFrom pass.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Encapsulation of billing information for a Google Cloud Console project. A
   * project has at most one associated billing account at a time (but a billing
   * account can be assigned to multiple projects).
   * </pre>
   *
   * Protobuf type {@code google.cloud.billing.v1.ProjectBillingInfo}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.billing.v1.ProjectBillingInfo)
      com.google.cloud.billing.v1.ProjectBillingInfoOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.billing.v1.CloudBillingProto
          .internal_static_google_cloud_billing_v1_ProjectBillingInfo_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.billing.v1.CloudBillingProto
          .internal_static_google_cloud_billing_v1_ProjectBillingInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.billing.v1.ProjectBillingInfo.class,
              com.google.cloud.billing.v1.ProjectBillingInfo.Builder.class);
    }

    // Construct using com.google.cloud.billing.v1.ProjectBillingInfo.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    /** Resets every field to its default and clears the has-bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      projectId_ = "";
      billingAccountName_ = "";
      billingEnabled_ = false;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.billing.v1.CloudBillingProto
          .internal_static_google_cloud_billing_v1_ProjectBillingInfo_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.billing.v1.ProjectBillingInfo getDefaultInstanceForType() {
      return com.google.cloud.billing.v1.ProjectBillingInfo.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.billing.v1.ProjectBillingInfo build() {
      com.google.cloud.billing.v1.ProjectBillingInfo result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.billing.v1.ProjectBillingInfo buildPartial() {
      com.google.cloud.billing.v1.ProjectBillingInfo result =
          new com.google.cloud.billing.v1.ProjectBillingInfo(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose has-bit is set into the new message.
    private void buildPartial0(com.google.cloud.billing.v1.ProjectBillingInfo result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.projectId_ = projectId_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.billingAccountName_ = billingAccountName_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.billingEnabled_ = billingEnabled_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.billing.v1.ProjectBillingInfo) {
        return mergeFrom((com.google.cloud.billing.v1.ProjectBillingInfo) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge semantics: only non-default values from `other` overwrite.
    public Builder mergeFrom(com.google.cloud.billing.v1.ProjectBillingInfo other) {
      if (other == com.google.cloud.billing.v1.ProjectBillingInfo.getDefaultInstance()) return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getProjectId().isEmpty()) {
        projectId_ = other.projectId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getBillingAccountName().isEmpty()) {
        billingAccountName_ = other.billingAccountName_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (other.getBillingEnabled() != false) {
        setBillingEnabled(other.getBillingEnabled());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop: each case value is (field_number << 3) | wire_type,
    // e.g. 10 = field 1 length-delimited, 32 = field 4 varint.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                projectId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                billingAccountName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 32:
              {
                billingEnabled_ = input.readBool();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Has-bits for the four fields: 0x1 name, 0x2 projectId,
    // 0x4 billingAccountName, 0x8 billingEnabled.
    private int bitField0_;

    private java.lang.Object name_ = "";

    /**
     *
     *
     * <pre>
     * Output only. The resource name for the `ProjectBillingInfo`; has the form
     * `projects/{project_id}/billingInfo`. For example, the resource name for the
     * billing information for project `tokyo-rain-123` would be
     * `projects/tokyo-rain-123/billingInfo`.
     * </pre>
     *
     * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. The resource name for the `ProjectBillingInfo`; has the form
     * `projects/{project_id}/billingInfo`. For example, the resource name for the
     * billing information for project `tokyo-rain-123` would be
     * `projects/tokyo-rain-123/billingInfo`.
     * </pre>
     *
     * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. The resource name for the `ProjectBillingInfo`; has the form
     * `projects/{project_id}/billingInfo`. For example, the resource name for the
     * billing information for project `tokyo-rain-123` would be
     * `projects/tokyo-rain-123/billingInfo`.
     * </pre>
     *
     * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. The resource name for the `ProjectBillingInfo`; has the form
     * `projects/{project_id}/billingInfo`. For example, the resource name for the
     * billing information for project `tokyo-rain-123` would be
     * `projects/tokyo-rain-123/billingInfo`.
     * </pre>
     *
     * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. The resource name for the `ProjectBillingInfo`; has the form
     * `projects/{project_id}/billingInfo`. For example, the resource name for the
     * billing information for project `tokyo-rain-123` would be
     * `projects/tokyo-rain-123/billingInfo`.
     * </pre>
     *
     * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object projectId_ = "";

    /**
     *
     *
     * <pre>
     * Output only. The ID of the project that this `ProjectBillingInfo`
     * represents, such as `tokyo-rain-123`. This is a convenience field so that
     * you don't need to parse the `name` field to obtain a project ID.
     * </pre>
     *
     * <code>string project_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The projectId.
     */
    public java.lang.String getProjectId() {
      java.lang.Object ref = projectId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        projectId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. The ID of the project that this `ProjectBillingInfo`
     * represents, such as `tokyo-rain-123`. This is a convenience field so that
     * you don't need to parse the `name` field to obtain a project ID.
     * </pre>
     *
     * <code>string project_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bytes for projectId.
     */
    public com.google.protobuf.ByteString getProjectIdBytes() {
      java.lang.Object ref = projectId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        projectId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. The ID of the project that this `ProjectBillingInfo`
     * represents, such as `tokyo-rain-123`. This is a convenience field so that
     * you don't need to parse the `name` field to obtain a project ID.
     * </pre>
     *
     * <code>string project_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The projectId to set.
     * @return This builder for chaining.
     */
    public Builder setProjectId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      projectId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. The ID of the project that this `ProjectBillingInfo`
     * represents, such as `tokyo-rain-123`. This is a convenience field so that
     * you don't need to parse the `name` field to obtain a project ID.
     * </pre>
     *
     * <code>string project_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearProjectId() {
      projectId_ = getDefaultInstance().getProjectId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. The ID of the project that this `ProjectBillingInfo`
     * represents, such as `tokyo-rain-123`. This is a convenience field so that
     * you don't need to parse the `name` field to obtain a project ID.
     * </pre>
     *
     * <code>string project_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The bytes for projectId to set.
     * @return This builder for chaining.
     */
    public Builder setProjectIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      projectId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private java.lang.Object billingAccountName_ = "";

    /**
     *
     *
     * <pre>
     * The resource name of the billing account associated with the project, if
     * any. For example, `billingAccounts/012345-567890-ABCDEF`.
     * </pre>
     *
     * <code>string billing_account_name = 3;</code>
     *
     * @return The billingAccountName.
     */
    public java.lang.String getBillingAccountName() {
      java.lang.Object ref = billingAccountName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        billingAccountName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The resource name of the billing account associated with the project, if
     * any. For example, `billingAccounts/012345-567890-ABCDEF`.
     * </pre>
     *
     * <code>string billing_account_name = 3;</code>
     *
     * @return The bytes for billingAccountName.
     */
    public com.google.protobuf.ByteString getBillingAccountNameBytes() {
      java.lang.Object ref = billingAccountName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        billingAccountName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The resource name of the billing account associated with the project, if
     * any. For example, `billingAccounts/012345-567890-ABCDEF`.
     * </pre>
     *
     * <code>string billing_account_name = 3;</code>
     *
     * @param value The billingAccountName to set.
     * @return This builder for chaining.
     */
    public Builder setBillingAccountName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      billingAccountName_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The resource name of the billing account associated with the project, if
     * any. For example, `billingAccounts/012345-567890-ABCDEF`.
     * </pre>
     *
     * <code>string billing_account_name = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearBillingAccountName() {
      billingAccountName_ = getDefaultInstance().getBillingAccountName();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The resource name of the billing account associated with the project, if
     * any. For example, `billingAccounts/012345-567890-ABCDEF`.
     * </pre>
     *
     * <code>string billing_account_name = 3;</code>
     *
     * @param value The bytes for billingAccountName to set.
     * @return This builder for chaining.
     */
    public Builder setBillingAccountNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      billingAccountName_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    private boolean billingEnabled_;

    /**
     *
     *
     * <pre>
     * Output only. True if the project is associated with an open billing
     * account, to which usage on the project is charged. False if the project is
     * associated with a closed billing account, or no billing account at all, and
     * therefore cannot use paid services.
     * </pre>
     *
     * <code>bool billing_enabled = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The billingEnabled.
     */
    @java.lang.Override
    public boolean getBillingEnabled() {
      return billingEnabled_;
    }

    /**
     *
     *
     * <pre>
     * Output only. True if the project is associated with an open billing
     * account, to which usage on the project is charged. False if the project is
     * associated with a closed billing account, or no billing account at all, and
     * therefore cannot use paid services.
     * </pre>
     *
     * <code>bool billing_enabled = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The billingEnabled to set.
     * @return This builder for chaining.
     */
    public Builder setBillingEnabled(boolean value) {

      billingEnabled_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. True if the project is associated with an open billing
     * account, to which usage on the project is charged. False if the project is
     * associated with a closed billing account, or no billing account at all, and
     * therefore cannot use paid services.
     * </pre>
     *
     * <code>bool billing_enabled = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearBillingEnabled() {
      bitField0_ = (bitField0_ & ~0x00000008);
      billingEnabled_ = false;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.billing.v1.ProjectBillingInfo)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.billing.v1.ProjectBillingInfo)
  // Shared immutable default instance; also serves as the identity for
  // toBuilder()/mergeFrom() fast paths.
  private static final com.google.cloud.billing.v1.ProjectBillingInfo DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.billing.v1.ProjectBillingInfo();
  }

  public static com.google.cloud.billing.v1.ProjectBillingInfo getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser implemented on top of the Builder's mergeFrom loop; partial results
  // are attached to thrown InvalidProtocolBufferExceptions.
  private static final com.google.protobuf.Parser<ProjectBillingInfo> PARSER =
      new com.google.protobuf.AbstractParser<ProjectBillingInfo>() {
        @java.lang.Override
        public ProjectBillingInfo parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ProjectBillingInfo> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ProjectBillingInfo> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.billing.v1.ProjectBillingInfo getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
google/ExoPlayer | 38,212 | demos/transformer/src/main/java/com/google/android/exoplayer2/transformerdemo/ConfigurationActivity.java | /*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformerdemo;
import static android.Manifest.permission.READ_EXTERNAL_STORAGE;
import static android.Manifest.permission.READ_MEDIA_VIDEO;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.Util.SDK_INT;
import android.app.Activity;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import androidx.activity.result.ActivityResult;
import androidx.activity.result.ActivityResultLauncher;
import androidx.activity.result.contract.ActivityResultContracts;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.transformer.TransformationRequest;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.material.slider.RangeSlider;
import com.google.android.material.slider.Slider;
import com.google.common.collect.ImmutableMap;
import java.util.Arrays;
import java.util.List;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/**
* An {@link Activity} that sets the configuration to use for exporting and playing media, using
* {@link TransformerActivity}.
*
* @deprecated com.google.android.exoplayer2 is deprecated. Please migrate to androidx.media3 (which
* contains the same ExoPlayer code). See <a
* href="https://developer.android.com/guide/topics/media/media3/getting-started/migration-guide">the
* migration guide</a> for more details, including a script to help with the migration.
*/
@Deprecated
public final class ConfigurationActivity extends AppCompatActivity {
  // Intent-extra keys read by TransformerActivity to build the export request.
  public static final String SHOULD_REMOVE_AUDIO = "should_remove_audio";
  public static final String SHOULD_REMOVE_VIDEO = "should_remove_video";
  public static final String SHOULD_FLATTEN_FOR_SLOW_MOTION = "should_flatten_for_slow_motion";
  public static final String FORCE_AUDIO_TRACK = "force_audio_track";
  public static final String AUDIO_MIME_TYPE = "audio_mime_type";
  public static final String VIDEO_MIME_TYPE = "video_mime_type";
  public static final String RESOLUTION_HEIGHT = "resolution_height";
  public static final String SCALE_X = "scale_x";
  public static final String SCALE_Y = "scale_y";
  public static final String ROTATE_DEGREES = "rotate_degrees";
  public static final String TRIM_START_MS = "trim_start_ms";
  public static final String TRIM_END_MS = "trim_end_ms";
  public static final String ENABLE_FALLBACK = "enable_fallback";
  public static final String ENABLE_DEBUG_PREVIEW = "enable_debug_preview";
  public static final String ABORT_SLOW_EXPORT = "abort_slow_export";
  public static final String HDR_MODE = "hdr_mode";
  public static final String AUDIO_EFFECTS_SELECTIONS = "audio_effects_selections";
  public static final String VIDEO_EFFECTS_SELECTIONS = "video_effects_selections";
  public static final String PERIODIC_VIGNETTE_CENTER_X = "periodic_vignette_center_x";
  public static final String PERIODIC_VIGNETTE_CENTER_Y = "periodic_vignette_center_y";
  public static final String PERIODIC_VIGNETTE_INNER_RADIUS = "periodic_vignette_inner_radius";
  public static final String PERIODIC_VIGNETTE_OUTER_RADIUS = "periodic_vignette_outer_radius";
  public static final String COLOR_FILTER_SELECTION = "color_filter_selection";
  public static final String CONTRAST_VALUE = "contrast_value";
  public static final String RGB_ADJUSTMENT_RED_SCALE = "rgb_adjustment_red_scale";
  public static final String RGB_ADJUSTMENT_GREEN_SCALE = "rgb_adjustment_green_scale";
  public static final String RGB_ADJUSTMENT_BLUE_SCALE = "rgb_adjustment_blue_scale";
  public static final String HSL_ADJUSTMENTS_HUE = "hsl_adjustments_hue";
  public static final String HSL_ADJUSTMENTS_SATURATION = "hsl_adjustments_saturation";
  public static final String HSL_ADJUSTMENTS_LIGHTNESS = "hsl_adjustments_lightness";
  public static final String BITMAP_OVERLAY_URI = "bitmap_overlay_uri";
  public static final String BITMAP_OVERLAY_ALPHA = "bitmap_overlay_alpha";
  public static final String TEXT_OVERLAY_TEXT = "text_overlay_text";
  public static final String TEXT_OVERLAY_TEXT_COLOR = "text_overlay_text_color";
  public static final String TEXT_OVERLAY_ALPHA = "text_overlay_alpha";
  // Video effect selections: indices into VIDEO_EFFECTS and videoEffectsSelections.
  public static final int DIZZY_CROP_INDEX = 0;
  public static final int EDGE_DETECTOR_INDEX = 1;
  public static final int COLOR_FILTERS_INDEX = 2;
  public static final int MAP_WHITE_TO_GREEN_LUT_INDEX = 3;
  public static final int RGB_ADJUSTMENTS_INDEX = 4;
  public static final int HSL_ADJUSTMENT_INDEX = 5;
  public static final int CONTRAST_INDEX = 6;
  public static final int PERIODIC_VIGNETTE_INDEX = 7;
  public static final int SPIN_3D_INDEX = 8;
  public static final int ZOOM_IN_INDEX = 9;
  public static final int OVERLAY_LOGO_AND_TIMER_INDEX = 10;
  public static final int BITMAP_OVERLAY_INDEX = 11;
  public static final int TEXT_OVERLAY_INDEX = 12;
  // Audio effect selections: indices into AUDIO_EFFECTS and audioEffectsSelections.
  public static final int HIGH_PITCHED_INDEX = 0;
  public static final int SAMPLE_RATE_INDEX = 1;
  public static final int SKIP_SILENCE_INDEX = 2;
  // Color filter options (single-choice dialog positions).
  public static final int COLOR_FILTER_GRAYSCALE = 0;
  public static final int COLOR_FILTER_INVERTED = 1;
  public static final int COLOR_FILTER_SEPIA = 2;
  // Request code used for the storage-read permission prompt.
  public static final int FILE_PERMISSION_REQUEST_CODE = 1;
  private static final String[] PRESET_FILE_URIS = {
    "https://storage.googleapis.com/exoplayer-test-media-1/mp4/android-screens-10s.mp4",
    "https://storage.googleapis.com/exoplayer-test-media-0/android-block-1080-hevc.mp4",
    "https://html5demos.com/assets/dizzy.mp4",
    "https://html5demos.com/assets/dizzy.webm",
    "https://storage.googleapis.com/exoplayer-test-media-1/mp4/portrait_4k60.mp4",
    "https://storage.googleapis.com/exoplayer-test-media-1/mp4/8k24fps_4s.mp4",
    "https://storage.googleapis.com/exoplayer-test-media-1/mp4/1920w_1080h_4s.mp4",
    "https://storage.googleapis.com/exoplayer-test-media-0/BigBuckBunny_320x180.mp4",
    "https://storage.googleapis.com/exoplayer-test-media-1/mp4/portrait_avc_aac.mp4",
    "https://storage.googleapis.com/exoplayer-test-media-1/mp4/portrait_rotated_avc_aac.mp4",
    "https://storage.googleapis.com/exoplayer-test-media-1/jpg/london.jpg",
    "https://storage.googleapis.com/exoplayer-test-media-1/jpg/tokyo.jpg",
    "https://storage.googleapis.com/exoplayer-test-media-1/mp4/slow-motion/slowMotion_stopwatch_240fps_long.mp4",
    "https://storage.googleapis.com/exoplayer-test-media-1/gen/screens/dash-vod-single-segment/manifest-baseline.mpd",
    "https://storage.googleapis.com/exoplayer-test-media-1/mp4/samsung-s21-hdr-hdr10.mp4",
    "https://storage.googleapis.com/exoplayer-test-media-1/mp4/Pixel7Pro_HLG_1080P.mp4",
    "https://storage.googleapis.com/exoplayer-test-media-1/mp4/sample_video_track_only.mp4",
  };
  // Human-readable labels for the preset picker; MUST stay in the same order as PRESET_FILE_URIS.
  private static final String[] PRESET_FILE_URI_DESCRIPTIONS = { // same order as PRESET_FILE_URIS
    "720p H264 video and AAC audio",
    "1080p H265 video and AAC audio",
    "360p H264 video and AAC audio",
    "360p VP8 video and Vorbis audio",
    "4K H264 video and AAC audio (portrait, no B-frames)",
    "8k H265 video and AAC audio",
    "Short 1080p H265 video and AAC audio",
    "Long 180p H264 video and AAC audio",
    "H264 video and AAC audio (portrait, H > W, 0°)",
    "H264 video and AAC audio (portrait, H < W, 90°)",
    "London JPG image (Plays for 5secs at 30fps)",
    "Tokyo JPG image (Portrait, Plays for 5secs at 30fps)",
    "SEF slow motion with 240 fps",
    "480p DASH (non-square pixels)",
    "HDR (HDR10) H265 limited range video (encoding may fail)",
    "HDR (HLG) H265 limited range video (encoding may fail)",
    "720p H264 video with no audio",
  };
  // Labels for the multi-choice effect dialogs; order must match the *_INDEX constants above.
  private static final String[] AUDIO_EFFECTS = {
    "High pitched", "Sample rate of 48000Hz", "Skip silence"
  };
  private static final String[] VIDEO_EFFECTS = {
    "Dizzy crop",
    "Edge detector (Media Pipe)",
    "Color filters",
    "Map White to Green Color Lookup Table",
    "RGB Adjustments",
    "HSL Adjustments",
    "Contrast",
    "Periodic vignette",
    "3D spin",
    "Zoom in start",
    "Overlay logo & timer",
    "Custom Bitmap Overlay",
    "Custom Text Overlay",
  };
  // Maps the HDR-mode spinner labels to TransformationRequest.HdrMode values.
  private static final ImmutableMap<String, @TransformationRequest.HdrMode Integer>
      HDR_MODE_DESCRIPTIONS =
          new ImmutableMap.Builder<String, @TransformationRequest.HdrMode Integer>()
              .put("Keep HDR", TransformationRequest.HDR_MODE_KEEP_HDR)
              .put(
                  "MediaCodec tone-map HDR to SDR",
                  TransformationRequest.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_MEDIACODEC)
              .put(
                  "OpenGL tone-map HDR to SDR",
                  TransformationRequest.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL)
              .put(
                  "Force Interpret HDR as SDR",
                  TransformationRequest.HDR_MODE_EXPERIMENTAL_FORCE_INTERPRET_HDR_AS_SDR)
              .build();
  // Maps text-overlay color spinner labels to android.graphics.Color constants.
  private static final ImmutableMap<String, Integer> OVERLAY_COLORS =
      new ImmutableMap.Builder<String, Integer>()
          .put("BLACK", Color.BLACK)
          .put("BLUE", Color.BLUE)
          .put("CYAN", Color.CYAN)
          .put("DKGRAY", Color.DKGRAY)
          .put("GRAY", Color.GRAY)
          .put("GREEN", Color.GREEN)
          .put("LTGRAY", Color.LTGRAY)
          .put("MAGENTA", Color.MAGENTA)
          .put("RED", Color.RED)
          .put("WHITE", Color.WHITE)
          .put("YELLOW", Color.YELLOW)
          .build();
  // Spinner entry meaning "do not override this property of the input".
  private static final String SAME_AS_INPUT_OPTION = "same as input";
  // 1/sqrt(2): upper bound used for the periodic-vignette radius slider.
  private static final float HALF_DIAGONAL = 1f / (float) Math.sqrt(2);
  // Views and launchers; assigned in onCreate, hence @MonotonicNonNull.
  private @MonotonicNonNull Runnable onPermissionsGranted;
  private @MonotonicNonNull ActivityResultLauncher<Intent> videoLocalFilePickerLauncher;
  private @MonotonicNonNull ActivityResultLauncher<Intent> overlayLocalFilePickerLauncher;
  private @MonotonicNonNull Button selectPresetFileButton;
  private @MonotonicNonNull Button selectLocalFileButton;
  private @MonotonicNonNull TextView selectedFileTextView;
  private @MonotonicNonNull CheckBox removeAudioCheckbox;
  private @MonotonicNonNull CheckBox removeVideoCheckbox;
  private @MonotonicNonNull CheckBox flattenForSlowMotionCheckbox;
  private @MonotonicNonNull CheckBox forceAudioTrackCheckbox;
  private @MonotonicNonNull Spinner audioMimeSpinner;
  private @MonotonicNonNull Spinner videoMimeSpinner;
  private @MonotonicNonNull Spinner resolutionHeightSpinner;
  private @MonotonicNonNull Spinner scaleSpinner;
  private @MonotonicNonNull Spinner rotateSpinner;
  private @MonotonicNonNull CheckBox trimCheckBox;
  private @MonotonicNonNull CheckBox enableFallbackCheckBox;
  private @MonotonicNonNull CheckBox enableDebugPreviewCheckBox;
  private @MonotonicNonNull CheckBox abortSlowExportCheckBox;
  private @MonotonicNonNull Spinner hdrModeSpinner;
  private @MonotonicNonNull Button selectAudioEffectsButton;
  private @MonotonicNonNull Button selectVideoEffectsButton;
  private boolean @MonotonicNonNull [] audioEffectsSelections;
  private boolean @MonotonicNonNull [] videoEffectsSelections;
  // Selected input: non-null when the user picked a local file, otherwise a preset is used.
  private @Nullable Uri localFileUri;
  private int inputUriPosition;
  // Effect parameters captured from the configuration dialogs; forwarded via the Bundle.
  private long trimStartMs;
  private long trimEndMs;
  private int colorFilterSelection;
  private float rgbAdjustmentRedScale;
  private float rgbAdjustmentGreenScale;
  private float rgbAdjustmentBlueScale;
  private float contrastValue;
  private float hueAdjustment;
  private float saturationAdjustment;
  private float lightnessAdjustment;
  private float periodicVignetteCenterX;
  private float periodicVignetteCenterY;
  private float periodicVignetteInnerRadius;
  private float periodicVignetteOuterRadius;
  private @MonotonicNonNull String bitmapOverlayUri;
  private float bitmapOverlayAlpha;
  private @MonotonicNonNull String textOverlayText;
  private int textOverlayTextColor;
  private float textOverlayAlpha;
  /** Inflates the configuration UI and wires up all controls and activity-result launchers. */
  @Override
  protected void onCreate(@Nullable Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.configuration_activity);
    findViewById(R.id.export_button).setOnClickListener(this::startExport);
    // Activity-result launchers must be registered before the activity is started.
    videoLocalFilePickerLauncher =
        registerForActivityResult(
            new ActivityResultContracts.StartActivityForResult(),
            this::videoLocalFilePickerLauncherResult);
    overlayLocalFilePickerLauncher =
        registerForActivityResult(
            new ActivityResultContracts.StartActivityForResult(),
            this::overlayLocalFilePickerLauncherResult);
    selectPresetFileButton = findViewById(R.id.select_preset_file_button);
    selectPresetFileButton.setOnClickListener(this::selectPresetFile);
    selectLocalFileButton = findViewById(R.id.select_local_file_button);
    selectLocalFileButton.setOnClickListener(
        view ->
            selectLocalFile(
                view, checkNotNull(videoLocalFilePickerLauncher), /* mimeType= */ "video/*"));
    selectedFileTextView = findViewById(R.id.selected_file_text_view);
    selectedFileTextView.setText(PRESET_FILE_URI_DESCRIPTIONS[inputUriPosition]);
    removeAudioCheckbox = findViewById(R.id.remove_audio_checkbox);
    removeAudioCheckbox.setOnClickListener(this::onRemoveAudio);
    removeVideoCheckbox = findViewById(R.id.remove_video_checkbox);
    removeVideoCheckbox.setOnClickListener(this::onRemoveVideo);
    flattenForSlowMotionCheckbox = findViewById(R.id.flatten_for_slow_motion_checkbox);
    forceAudioTrackCheckbox = findViewById(R.id.force_audio_track_checkbox);
    // Each spinner's first entry is SAME_AS_INPUT_OPTION, meaning "don't override".
    ArrayAdapter<String> audioMimeAdapter =
        new ArrayAdapter<>(/* context= */ this, R.layout.spinner_item);
    audioMimeAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
    audioMimeSpinner = findViewById(R.id.audio_mime_spinner);
    audioMimeSpinner.setAdapter(audioMimeAdapter);
    audioMimeAdapter.addAll(
        SAME_AS_INPUT_OPTION, MimeTypes.AUDIO_AAC, MimeTypes.AUDIO_AMR_NB, MimeTypes.AUDIO_AMR_WB);
    ArrayAdapter<String> videoMimeAdapter =
        new ArrayAdapter<>(/* context= */ this, R.layout.spinner_item);
    videoMimeAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
    videoMimeSpinner = findViewById(R.id.video_mime_spinner);
    videoMimeSpinner.setAdapter(videoMimeAdapter);
    videoMimeAdapter.addAll(
        SAME_AS_INPUT_OPTION, MimeTypes.VIDEO_H263, MimeTypes.VIDEO_H264, MimeTypes.VIDEO_MP4V);
    // H265 is only offered on API 24+.
    if (SDK_INT >= 24) {
      videoMimeAdapter.add(MimeTypes.VIDEO_H265);
    }
    ArrayAdapter<String> resolutionHeightAdapter =
        new ArrayAdapter<>(/* context= */ this, R.layout.spinner_item);
    resolutionHeightAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
    resolutionHeightSpinner = findViewById(R.id.resolution_height_spinner);
    resolutionHeightSpinner.setAdapter(resolutionHeightAdapter);
    resolutionHeightAdapter.addAll(
        SAME_AS_INPUT_OPTION, "144", "240", "360", "480", "720", "1080", "1440", "2160");
    ArrayAdapter<String> scaleAdapter =
        new ArrayAdapter<>(/* context= */ this, R.layout.spinner_item);
    scaleAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
    scaleSpinner = findViewById(R.id.scale_spinner);
    scaleSpinner.setAdapter(scaleAdapter);
    // Entries are "x, y" pairs; startExport splits them on ", ".
    scaleAdapter.addAll(SAME_AS_INPUT_OPTION, "-1, -1", "-1, 1", "1, 1", ".5, 1", ".5, .5", "2, 2");
    ArrayAdapter<String> rotateAdapter =
        new ArrayAdapter<>(/* context= */ this, R.layout.spinner_item);
    rotateAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
    rotateSpinner = findViewById(R.id.rotate_spinner);
    rotateSpinner.setAdapter(rotateAdapter);
    rotateAdapter.addAll(SAME_AS_INPUT_OPTION, "0", "10", "45", "60", "90", "180");
    trimCheckBox = findViewById(R.id.trim_checkbox);
    trimCheckBox.setOnCheckedChangeListener(this::selectTrimBounds);
    trimStartMs = C.TIME_UNSET;
    trimEndMs = C.TIME_UNSET;
    enableFallbackCheckBox = findViewById(R.id.enable_fallback_checkbox);
    enableDebugPreviewCheckBox = findViewById(R.id.enable_debug_preview_checkbox);
    abortSlowExportCheckBox = findViewById(R.id.abort_slow_export_checkbox);
    ArrayAdapter<String> hdrModeAdapter =
        new ArrayAdapter<>(/* context= */ this, R.layout.spinner_item);
    hdrModeAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
    hdrModeSpinner = findViewById(R.id.hdr_mode_spinner);
    hdrModeSpinner.setAdapter(hdrModeAdapter);
    hdrModeAdapter.addAll(HDR_MODE_DESCRIPTIONS.keySet());
    // Effect selections start all-false (no effects applied).
    audioEffectsSelections = new boolean[AUDIO_EFFECTS.length];
    selectAudioEffectsButton = findViewById(R.id.select_audio_effects_button);
    selectAudioEffectsButton.setOnClickListener(this::selectAudioEffects);
    videoEffectsSelections = new boolean[VIDEO_EFFECTS.length];
    selectVideoEffectsButton = findViewById(R.id.select_video_effects_button);
    selectVideoEffectsButton.setOnClickListener(this::selectVideoEffects);
  }
@Override
public void onRequestPermissionsResult(
int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == FILE_PERMISSION_REQUEST_CODE
&& grantResults.length == 1
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
checkNotNull(onPermissionsGranted).run();
} else {
Toast.makeText(
getApplicationContext(), getString(R.string.permission_denied), Toast.LENGTH_LONG)
.show();
}
}
@Override
protected void onResume() {
super.onResume();
@Nullable Uri intentUri = getIntent().getData();
if (intentUri != null) {
checkNotNull(selectPresetFileButton).setEnabled(false);
checkNotNull(selectLocalFileButton).setEnabled(false);
checkNotNull(selectedFileTextView).setText(intentUri.toString());
}
}
  @Override
  protected void onNewIntent(Intent intent) {
    super.onNewIntent(intent);
    // Store the latest intent so getIntent() in onResume/startExport sees its data URI.
    setIntent(intent);
  }
  /**
   * Collects the selected options into a {@link Bundle}, attaches the chosen input URI, and starts
   * {@link TransformerActivity} to run the export.
   */
  @RequiresNonNull({
    "removeAudioCheckbox",
    "removeVideoCheckbox",
    "flattenForSlowMotionCheckbox",
    "forceAudioTrackCheckbox",
    "audioMimeSpinner",
    "videoMimeSpinner",
    "resolutionHeightSpinner",
    "scaleSpinner",
    "rotateSpinner",
    "trimCheckBox",
    "enableFallbackCheckBox",
    "enableDebugPreviewCheckBox",
    "abortSlowExportCheckBox",
    "hdrModeSpinner",
    "audioEffectsSelections",
    "videoEffectsSelections"
  })
  private void startExport(View view) {
    Intent transformerIntent = new Intent(/* packageContext= */ this, TransformerActivity.class);
    Bundle bundle = new Bundle();
    bundle.putBoolean(SHOULD_REMOVE_AUDIO, removeAudioCheckbox.isChecked());
    bundle.putBoolean(SHOULD_REMOVE_VIDEO, removeVideoCheckbox.isChecked());
    bundle.putBoolean(SHOULD_FLATTEN_FOR_SLOW_MOTION, flattenForSlowMotionCheckbox.isChecked());
    bundle.putBoolean(FORCE_AUDIO_TRACK, forceAudioTrackCheckbox.isChecked());
    // Spinner values equal to SAME_AS_INPUT_OPTION are omitted from the bundle, so the
    // corresponding input property is left unchanged.
    String selectedAudioMimeType = String.valueOf(audioMimeSpinner.getSelectedItem());
    if (!SAME_AS_INPUT_OPTION.equals(selectedAudioMimeType)) {
      bundle.putString(AUDIO_MIME_TYPE, selectedAudioMimeType);
    }
    String selectedVideoMimeType = String.valueOf(videoMimeSpinner.getSelectedItem());
    if (!SAME_AS_INPUT_OPTION.equals(selectedVideoMimeType)) {
      bundle.putString(VIDEO_MIME_TYPE, selectedVideoMimeType);
    }
    String selectedResolutionHeight = String.valueOf(resolutionHeightSpinner.getSelectedItem());
    if (!SAME_AS_INPUT_OPTION.equals(selectedResolutionHeight)) {
      bundle.putInt(RESOLUTION_HEIGHT, Integer.parseInt(selectedResolutionHeight));
    }
    String selectedScale = String.valueOf(scaleSpinner.getSelectedItem());
    if (!SAME_AS_INPUT_OPTION.equals(selectedScale)) {
      // Spinner entries are "x, y" pairs populated in onCreate.
      List<String> scaleXY = Arrays.asList(selectedScale.split(", "));
      checkState(scaleXY.size() == 2);
      bundle.putFloat(SCALE_X, Float.parseFloat(scaleXY.get(0)));
      bundle.putFloat(SCALE_Y, Float.parseFloat(scaleXY.get(1)));
    }
    String selectedRotate = String.valueOf(rotateSpinner.getSelectedItem());
    if (!SAME_AS_INPUT_OPTION.equals(selectedRotate)) {
      bundle.putFloat(ROTATE_DEGREES, Float.parseFloat(selectedRotate));
    }
    // Trim bounds are only forwarded when trimming is enabled; they are set by selectTrimBounds.
    if (trimCheckBox.isChecked()) {
      bundle.putLong(TRIM_START_MS, trimStartMs);
      bundle.putLong(TRIM_END_MS, trimEndMs);
    }
    bundle.putBoolean(ENABLE_FALLBACK, enableFallbackCheckBox.isChecked());
    bundle.putBoolean(ENABLE_DEBUG_PREVIEW, enableDebugPreviewCheckBox.isChecked());
    bundle.putBoolean(ABORT_SLOW_EXPORT, abortSlowExportCheckBox.isChecked());
    String selectedhdrMode = String.valueOf(hdrModeSpinner.getSelectedItem());
    bundle.putInt(HDR_MODE, checkNotNull(HDR_MODE_DESCRIPTIONS.get(selectedhdrMode)));
    bundle.putBooleanArray(AUDIO_EFFECTS_SELECTIONS, audioEffectsSelections);
    bundle.putBooleanArray(VIDEO_EFFECTS_SELECTIONS, videoEffectsSelections);
    bundle.putInt(COLOR_FILTER_SELECTION, colorFilterSelection);
    bundle.putFloat(CONTRAST_VALUE, contrastValue);
    bundle.putFloat(RGB_ADJUSTMENT_RED_SCALE, rgbAdjustmentRedScale);
    bundle.putFloat(RGB_ADJUSTMENT_GREEN_SCALE, rgbAdjustmentGreenScale);
    bundle.putFloat(RGB_ADJUSTMENT_BLUE_SCALE, rgbAdjustmentBlueScale);
    bundle.putFloat(HSL_ADJUSTMENTS_HUE, hueAdjustment);
    bundle.putFloat(HSL_ADJUSTMENTS_SATURATION, saturationAdjustment);
    bundle.putFloat(HSL_ADJUSTMENTS_LIGHTNESS, lightnessAdjustment);
    bundle.putFloat(PERIODIC_VIGNETTE_CENTER_X, periodicVignetteCenterX);
    bundle.putFloat(PERIODIC_VIGNETTE_CENTER_Y, periodicVignetteCenterY);
    bundle.putFloat(PERIODIC_VIGNETTE_INNER_RADIUS, periodicVignetteInnerRadius);
    bundle.putFloat(PERIODIC_VIGNETTE_OUTER_RADIUS, periodicVignetteOuterRadius);
    bundle.putString(BITMAP_OVERLAY_URI, bitmapOverlayUri);
    bundle.putFloat(BITMAP_OVERLAY_ALPHA, bitmapOverlayAlpha);
    bundle.putString(TEXT_OVERLAY_TEXT, textOverlayText);
    bundle.putInt(TEXT_OVERLAY_TEXT_COLOR, textOverlayTextColor);
    bundle.putFloat(TEXT_OVERLAY_ALPHA, textOverlayAlpha);
    transformerIntent.putExtras(bundle);
    // Input precedence: URI from an external launch intent, then a picked local file, then the
    // currently selected preset.
    @Nullable Uri intentUri;
    if (getIntent().getData() != null) {
      intentUri = getIntent().getData();
    } else if (localFileUri != null) {
      intentUri = localFileUri;
    } else {
      intentUri = Uri.parse(PRESET_FILE_URIS[inputUriPosition]);
    }
    transformerIntent.setData(intentUri);
    startActivity(transformerIntent);
  }
private void selectPresetFile(View view) {
new AlertDialog.Builder(/* context= */ this)
.setTitle(R.string.select_preset_file_title)
.setSingleChoiceItems(
PRESET_FILE_URI_DESCRIPTIONS, inputUriPosition, this::selectPresetFileInDialog)
.setPositiveButton(android.R.string.ok, /* listener= */ null)
.create()
.show();
}
@RequiresNonNull("selectedFileTextView")
private void selectPresetFileInDialog(DialogInterface dialog, int which) {
inputUriPosition = which;
localFileUri = null;
selectedFileTextView.setText(PRESET_FILE_URI_DESCRIPTIONS[inputUriPosition]);
}
private void selectLocalFile(
View view, ActivityResultLauncher<Intent> localFilePickerLauncher, String mimeType) {
String permission = SDK_INT >= 33 ? READ_MEDIA_VIDEO : READ_EXTERNAL_STORAGE;
if (ActivityCompat.checkSelfPermission(/* context= */ this, permission)
!= PackageManager.PERMISSION_GRANTED) {
onPermissionsGranted = () -> launchLocalFilePicker(localFilePickerLauncher, mimeType);
ActivityCompat.requestPermissions(
/* activity= */ this, new String[] {permission}, FILE_PERMISSION_REQUEST_CODE);
} else {
launchLocalFilePicker(localFilePickerLauncher, mimeType);
}
}
private void launchLocalFilePicker(
ActivityResultLauncher<Intent> localFilePickerLauncher, String mimeType) {
Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
intent.setType(mimeType);
checkNotNull(localFilePickerLauncher).launch(intent);
}
@RequiresNonNull("selectedFileTextView")
private void videoLocalFilePickerLauncherResult(ActivityResult result) {
Intent data = result.getData();
if (data != null) {
localFileUri = checkNotNull(data.getData());
selectedFileTextView.setText(localFileUri.toString());
} else {
Toast.makeText(
getApplicationContext(),
getString(R.string.local_file_picker_failed),
Toast.LENGTH_SHORT)
.show();
}
}
private void overlayLocalFilePickerLauncherResult(ActivityResult result) {
Intent data = result.getData();
if (data != null) {
bitmapOverlayUri = checkNotNull(data.getData()).toString();
} else {
Toast.makeText(
getApplicationContext(),
getString(R.string.local_file_picker_failed),
Toast.LENGTH_SHORT)
.show();
}
}
private void selectAudioEffects(View view) {
new AlertDialog.Builder(/* context= */ this)
.setTitle(R.string.select_audio_effects)
.setMultiChoiceItems(
AUDIO_EFFECTS, checkNotNull(audioEffectsSelections), this::selectAudioEffect)
.setPositiveButton(android.R.string.ok, /* listener= */ null)
.create()
.show();
}
private void selectVideoEffects(View view) {
new AlertDialog.Builder(/* context= */ this)
.setTitle(R.string.select_video_effects)
.setMultiChoiceItems(
VIDEO_EFFECTS, checkNotNull(videoEffectsSelections), this::selectVideoEffect)
.setPositiveButton(android.R.string.ok, /* listener= */ null)
.create()
.show();
}
  /** Opens the trim-bounds dialog when the trim checkbox is checked; unchecking does nothing. */
  private void selectTrimBounds(View view, boolean isChecked) {
    if (!isChecked) {
      return;
    }
    View dialogView = getLayoutInflater().inflate(R.layout.trim_options, /* root= */ null);
    RangeSlider trimRangeSlider =
        checkNotNull(dialogView.findViewById(R.id.trim_bounds_range_slider));
    trimRangeSlider.setValues(0f, 1f); // seconds
    new AlertDialog.Builder(/* context= */ this)
        .setView(dialogView)
        .setPositiveButton(
            android.R.string.ok,
            (DialogInterface dialogInterface, int i) -> {
              // Slider values are in seconds; the bundle expects milliseconds.
              List<Float> trimRange = trimRangeSlider.getValues();
              trimStartMs = Math.round(1000 * trimRange.get(0));
              trimEndMs = Math.round(1000 * trimRange.get(1));
            })
        .create()
        .show();
  }
  @RequiresNonNull("audioEffectsSelections")
  private void selectAudioEffect(DialogInterface dialog, int which, boolean isChecked) {
    // Multi-choice dialog callback: record the toggled state of one audio effect.
    audioEffectsSelections[which] = isChecked;
  }
  /**
   * Records the toggled state of one video effect. Effects with tunable parameters open a
   * follow-up configuration dialog when they are switched on.
   */
  @RequiresNonNull("videoEffectsSelections")
  private void selectVideoEffect(DialogInterface dialog, int which, boolean isChecked) {
    videoEffectsSelections[which] = isChecked;
    if (!isChecked) {
      return;
    }
    switch (which) {
      case COLOR_FILTERS_INDEX:
        controlColorFiltersSettings();
        break;
      case RGB_ADJUSTMENTS_INDEX:
        controlRgbAdjustmentsScale();
        break;
      case CONTRAST_INDEX:
        controlContrastSettings();
        break;
      case HSL_ADJUSTMENT_INDEX:
        controlHslAdjustmentSettings();
        break;
      case PERIODIC_VIGNETTE_INDEX:
        controlPeriodicVignetteSettings();
        break;
      case BITMAP_OVERLAY_INDEX:
        controlBitmapOverlaySettings();
        break;
      case TEXT_OVERLAY_INDEX:
        controlTextOverlaySettings();
        break;
    }
  }
  /** Shows a single-choice dialog selecting one of the COLOR_FILTER_* options. */
  private void controlColorFiltersSettings() {
    new AlertDialog.Builder(/* context= */ this)
        .setPositiveButton(android.R.string.ok, (dialogInterface, i) -> dialogInterface.dismiss())
        .setSingleChoiceItems(
            this.getResources().getStringArray(R.array.color_filter_options),
            colorFilterSelection,
            (DialogInterface dialogInterface, int i) -> {
              // The dialog item order must match the COLOR_FILTER_* constants.
              checkState(
                  i == COLOR_FILTER_GRAYSCALE
                      || i == COLOR_FILTER_INVERTED
                      || i == COLOR_FILTER_SEPIA);
              colorFilterSelection = i;
              dialogInterface.dismiss();
            })
        .create()
        .show();
  }
  /** Shows the RGB-adjustment dialog; slider values are captured only when OK is pressed. */
  private void controlRgbAdjustmentsScale() {
    View dialogView =
        getLayoutInflater().inflate(R.layout.rgb_adjustment_options, /* root= */ null);
    Slider redScaleSlider = checkNotNull(dialogView.findViewById(R.id.rgb_adjustment_red_scale));
    Slider greenScaleSlider =
        checkNotNull(dialogView.findViewById(R.id.rgb_adjustment_green_scale));
    Slider blueScaleSlider = checkNotNull(dialogView.findViewById(R.id.rgb_adjustment_blue_scale));
    new AlertDialog.Builder(/* context= */ this)
        .setTitle(R.string.rgb_adjustment_options)
        .setView(dialogView)
        .setPositiveButton(
            android.R.string.ok,
            (DialogInterface dialogInterface, int i) -> {
              rgbAdjustmentRedScale = redScaleSlider.getValue();
              rgbAdjustmentGreenScale = greenScaleSlider.getValue();
              rgbAdjustmentBlueScale = blueScaleSlider.getValue();
            })
        .create()
        .show();
  }
  /** Shows the contrast dialog; the slider value is captured only when OK is pressed. */
  private void controlContrastSettings() {
    View dialogView = getLayoutInflater().inflate(R.layout.contrast_options, /* root= */ null);
    Slider contrastSlider = checkNotNull(dialogView.findViewById(R.id.contrast_slider));
    new AlertDialog.Builder(/* context= */ this)
        .setView(dialogView)
        .setPositiveButton(
            android.R.string.ok,
            (DialogInterface dialogInterface, int i) -> contrastValue = contrastSlider.getValue())
        .create()
        .show();
  }
  /** Shows the HSL-adjustment dialog; slider values are captured only when OK is pressed. */
  private void controlHslAdjustmentSettings() {
    View dialogView =
        getLayoutInflater().inflate(R.layout.hsl_adjustment_options, /* root= */ null);
    Slider hueAdjustmentSlider = checkNotNull(dialogView.findViewById(R.id.hsl_adjustments_hue));
    Slider saturationAdjustmentSlider =
        checkNotNull(dialogView.findViewById(R.id.hsl_adjustments_saturation));
    // NOTE(review): id is hsl_adjustment_lightness (singular) while the others are plural —
    // presumably matches the layout file; confirm before renaming anything.
    Slider lightnessAdjustmentSlider =
        checkNotNull(dialogView.findViewById(R.id.hsl_adjustment_lightness));
    new AlertDialog.Builder(/* context= */ this)
        .setTitle(R.string.hsl_adjustment_options)
        .setView(dialogView)
        .setPositiveButton(
            android.R.string.ok,
            (DialogInterface dialogInterface, int i) -> {
              hueAdjustment = hueAdjustmentSlider.getValue();
              saturationAdjustment = saturationAdjustmentSlider.getValue();
              lightnessAdjustment = lightnessAdjustmentSlider.getValue();
            })
        .create()
        .show();
  }
  /** Shows the periodic-vignette dialog; values are captured only when OK is pressed. */
  private void controlPeriodicVignetteSettings() {
    View dialogView =
        getLayoutInflater().inflate(R.layout.periodic_vignette_options, /* root= */ null);
    Slider centerXSlider =
        checkNotNull(dialogView.findViewById(R.id.periodic_vignette_center_x_slider));
    Slider centerYSlider =
        checkNotNull(dialogView.findViewById(R.id.periodic_vignette_center_y_slider));
    RangeSlider radiusRangeSlider =
        checkNotNull(dialogView.findViewById(R.id.periodic_vignette_radius_range_slider));
    // Initial inner/outer radius thumbs: 0 and HALF_DIAGONAL (1/sqrt(2)).
    radiusRangeSlider.setValues(0f, HALF_DIAGONAL);
    new AlertDialog.Builder(/* context= */ this)
        .setTitle(R.string.periodic_vignette_options)
        .setView(dialogView)
        .setPositiveButton(
            android.R.string.ok,
            (DialogInterface dialogInterface, int i) -> {
              periodicVignetteCenterX = centerXSlider.getValue();
              periodicVignetteCenterY = centerYSlider.getValue();
              List<Float> radiusRange = radiusRangeSlider.getValues();
              periodicVignetteInnerRadius = radiusRange.get(0);
              periodicVignetteOuterRadius = radiusRange.get(1);
            })
        .create()
        .show();
  }
  /**
   * Shows the bitmap-overlay dialog. The URI button launches the image picker (which stores into
   * {@link #bitmapOverlayUri}); the alpha slider is captured only when OK is pressed.
   */
  private void controlBitmapOverlaySettings() {
    View dialogView =
        getLayoutInflater().inflate(R.layout.bitmap_overlay_options, /* root= */ null);
    Button uriButton = checkNotNull(dialogView.findViewById(R.id.bitmap_overlay_uri));
    uriButton.setOnClickListener(
        (view ->
            selectLocalFile(
                view, checkNotNull(overlayLocalFilePickerLauncher), /* mimeType= */ "image/*")));
    Slider alphaSlider = checkNotNull(dialogView.findViewById(R.id.bitmap_overlay_alpha_slider));
    new AlertDialog.Builder(/* context= */ this)
        .setTitle(R.string.bitmap_overlay_settings)
        .setView(dialogView)
        .setPositiveButton(
            android.R.string.ok,
            (DialogInterface dialogInterface, int i) -> {
              bitmapOverlayAlpha = alphaSlider.getValue();
            })
        .create()
        .show();
  }
  /** Shows the text-overlay dialog; text, color and alpha are captured only on OK. */
  private void controlTextOverlaySettings() {
    View dialogView = getLayoutInflater().inflate(R.layout.text_overlay_options, /* root= */ null);
    EditText textEditText = checkNotNull(dialogView.findViewById(R.id.text_overlay_text));
    ArrayAdapter<String> textColorAdapter =
        new ArrayAdapter<>(/* context= */ this, R.layout.spinner_item);
    textColorAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
    Spinner textColorSpinner = checkNotNull(dialogView.findViewById(R.id.text_overlay_text_color));
    textColorSpinner.setAdapter(textColorAdapter);
    textColorAdapter.addAll(OVERLAY_COLORS.keySet());
    Slider alphaSlider = checkNotNull(dialogView.findViewById(R.id.text_overlay_alpha_slider));
    new AlertDialog.Builder(/* context= */ this)
        // NOTE(review): this TEXT-overlay dialog reuses the BITMAP-overlay title string; looks
        // like a copy-paste from controlBitmapOverlaySettings() — confirm whether a
        // text_overlay_settings string resource exists and should be used here instead.
        .setTitle(R.string.bitmap_overlay_settings)
        .setView(dialogView)
        .setPositiveButton(
            android.R.string.ok,
            (DialogInterface dialogInterface, int i) -> {
              textOverlayText = textEditText.getText().toString();
              String selectedTextColor = String.valueOf(textColorSpinner.getSelectedItem());
              textOverlayTextColor = checkNotNull(OVERLAY_COLORS.get(selectedTextColor));
              textOverlayAlpha = alphaSlider.getValue();
            })
        .create()
        .show();
  }
@RequiresNonNull({
"removeVideoCheckbox",
"forceAudioTrackCheckbox",
"audioMimeSpinner",
"videoMimeSpinner",
"resolutionHeightSpinner",
"scaleSpinner",
"rotateSpinner",
"enableDebugPreviewCheckBox",
"hdrModeSpinner",
"selectAudioEffectsButton",
"selectVideoEffectsButton"
})
private void onRemoveAudio(View view) {
if (((CheckBox) view).isChecked()) {
removeVideoCheckbox.setChecked(false);
enableTrackSpecificOptions(/* isAudioEnabled= */ false, /* isVideoEnabled= */ true);
} else {
enableTrackSpecificOptions(/* isAudioEnabled= */ true, /* isVideoEnabled= */ true);
}
}
@RequiresNonNull({
"removeAudioCheckbox",
"forceAudioTrackCheckbox",
"audioMimeSpinner",
"videoMimeSpinner",
"resolutionHeightSpinner",
"scaleSpinner",
"rotateSpinner",
"enableDebugPreviewCheckBox",
"hdrModeSpinner",
"selectAudioEffectsButton",
"selectVideoEffectsButton"
})
private void onRemoveVideo(View view) {
if (((CheckBox) view).isChecked()) {
removeAudioCheckbox.setChecked(false);
enableTrackSpecificOptions(/* isAudioEnabled= */ true, /* isVideoEnabled= */ false);
} else {
enableTrackSpecificOptions(/* isAudioEnabled= */ true, /* isVideoEnabled= */ true);
}
}
  /** Enables/disables the audio-only and video-only controls and their labels. */
  @RequiresNonNull({
    "forceAudioTrackCheckbox",
    "audioMimeSpinner",
    "videoMimeSpinner",
    "resolutionHeightSpinner",
    "scaleSpinner",
    "rotateSpinner",
    "enableDebugPreviewCheckBox",
    "hdrModeSpinner",
    "selectAudioEffectsButton",
    "selectVideoEffectsButton"
  })
  private void enableTrackSpecificOptions(boolean isAudioEnabled, boolean isVideoEnabled) {
    // Forcing an audio track only makes sense while video is kept.
    forceAudioTrackCheckbox.setEnabled(isVideoEnabled);
    audioMimeSpinner.setEnabled(isAudioEnabled);
    videoMimeSpinner.setEnabled(isVideoEnabled);
    resolutionHeightSpinner.setEnabled(isVideoEnabled);
    scaleSpinner.setEnabled(isVideoEnabled);
    rotateSpinner.setEnabled(isVideoEnabled);
    enableDebugPreviewCheckBox.setEnabled(isVideoEnabled);
    hdrModeSpinner.setEnabled(isVideoEnabled);
    selectAudioEffectsButton.setEnabled(isAudioEnabled);
    selectVideoEffectsButton.setEnabled(isVideoEnabled);
    // Grey out the matching labels too.
    findViewById(R.id.audio_mime_text_view).setEnabled(isAudioEnabled);
    findViewById(R.id.video_mime_text_view).setEnabled(isVideoEnabled);
    findViewById(R.id.resolution_height_text_view).setEnabled(isVideoEnabled);
    findViewById(R.id.scale).setEnabled(isVideoEnabled);
    findViewById(R.id.rotate).setEnabled(isVideoEnabled);
    findViewById(R.id.hdr_mode).setEnabled(isVideoEnabled);
  }
}
|
googleapis/google-cloud-java | 38,080 | java-dms/proto-google-cloud-dms-v1/src/main/java/com/google/cloud/clouddms/v1/SequenceEntity.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/clouddms/v1/conversionworkspace_resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.clouddms.v1;
/**
*
*
* <pre>
* Sequence's parent is a schema.
* </pre>
*
* Protobuf type {@code google.cloud.clouddms.v1.SequenceEntity}
*/
public final class SequenceEntity extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.clouddms.v1.SequenceEntity)
SequenceEntityOrBuilder {
private static final long serialVersionUID = 0L;
// Use SequenceEntity.newBuilder() to construct.
private SequenceEntity(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SequenceEntity() {
startValue_ = com.google.protobuf.ByteString.EMPTY;
maxValue_ = com.google.protobuf.ByteString.EMPTY;
minValue_ = com.google.protobuf.ByteString.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SequenceEntity();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.clouddms.v1.ConversionWorkspaceResourcesProto
.internal_static_google_cloud_clouddms_v1_SequenceEntity_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.clouddms.v1.ConversionWorkspaceResourcesProto
.internal_static_google_cloud_clouddms_v1_SequenceEntity_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.clouddms.v1.SequenceEntity.class,
com.google.cloud.clouddms.v1.SequenceEntity.Builder.class);
}
private int bitField0_;
public static final int INCREMENT_FIELD_NUMBER = 1;
private long increment_ = 0L;
/**
*
*
* <pre>
* Increment value for the sequence.
* </pre>
*
* <code>int64 increment = 1;</code>
*
* @return The increment.
*/
@java.lang.Override
public long getIncrement() {
return increment_;
}
public static final int START_VALUE_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString startValue_ = com.google.protobuf.ByteString.EMPTY;
/**
*
*
* <pre>
   * Start number for the sequence, represented as bytes to accommodate large
   * numbers.
* </pre>
*
* <code>bytes start_value = 2;</code>
*
* @return The startValue.
*/
@java.lang.Override
public com.google.protobuf.ByteString getStartValue() {
return startValue_;
}
public static final int MAX_VALUE_FIELD_NUMBER = 3;
private com.google.protobuf.ByteString maxValue_ = com.google.protobuf.ByteString.EMPTY;
/**
*
*
* <pre>
   * Maximum number for the sequence, represented as bytes to accommodate large
   * numbers.
* </pre>
*
* <code>bytes max_value = 3;</code>
*
* @return The maxValue.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMaxValue() {
return maxValue_;
}
public static final int MIN_VALUE_FIELD_NUMBER = 4;
private com.google.protobuf.ByteString minValue_ = com.google.protobuf.ByteString.EMPTY;
/**
*
*
* <pre>
   * Minimum number for the sequence, represented as bytes to accommodate large
   * numbers.
* </pre>
*
* <code>bytes min_value = 4;</code>
*
* @return The minValue.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMinValue() {
return minValue_;
}
public static final int CYCLE_FIELD_NUMBER = 5;
private boolean cycle_ = false;
/**
*
*
* <pre>
* Indicates whether the sequence value should cycle through.
* </pre>
*
* <code>bool cycle = 5;</code>
*
* @return The cycle.
*/
@java.lang.Override
public boolean getCycle() {
return cycle_;
}
public static final int CACHE_FIELD_NUMBER = 6;
private long cache_ = 0L;
/**
*
*
* <pre>
* Indicates number of entries to cache / precreate.
* </pre>
*
* <code>int64 cache = 6;</code>
*
* @return The cache.
*/
@java.lang.Override
public long getCache() {
return cache_;
}
public static final int CUSTOM_FEATURES_FIELD_NUMBER = 7;
private com.google.protobuf.Struct customFeatures_;
/**
*
*
* <pre>
* Custom engine specific features.
* </pre>
*
* <code>.google.protobuf.Struct custom_features = 7;</code>
*
* @return Whether the customFeatures field is set.
*/
@java.lang.Override
public boolean hasCustomFeatures() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Custom engine specific features.
* </pre>
*
* <code>.google.protobuf.Struct custom_features = 7;</code>
*
* @return The customFeatures.
*/
@java.lang.Override
public com.google.protobuf.Struct getCustomFeatures() {
return customFeatures_ == null
? com.google.protobuf.Struct.getDefaultInstance()
: customFeatures_;
}
/**
*
*
* <pre>
* Custom engine specific features.
* </pre>
*
* <code>.google.protobuf.Struct custom_features = 7;</code>
*/
@java.lang.Override
public com.google.protobuf.StructOrBuilder getCustomFeaturesOrBuilder() {
return customFeatures_ == null
? com.google.protobuf.Struct.getDefaultInstance()
: customFeatures_;
}
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Memoized tri-state: -1 = not yet checked, 0 = not initialized, 1 = initialized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 message with no required fields: always initialized; cache the result.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 scalar fields are serialized only when they hold a non-default value.
    if (increment_ != 0L) {
      output.writeInt64(1, increment_);
    }
    if (!startValue_.isEmpty()) {
      output.writeBytes(2, startValue_);
    }
    if (!maxValue_.isEmpty()) {
      output.writeBytes(3, maxValue_);
    }
    if (!minValue_.isEmpty()) {
      output.writeBytes(4, minValue_);
    }
    if (cycle_ != false) {
      output.writeBool(5, cycle_);
    }
    if (cache_ != 0L) {
      output.writeInt64(6, cache_);
    }
    // custom_features is a message field; its presence is tracked by bit 0x01 of bitField0_.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(7, getCustomFeatures());
    }
    // Round-trip any fields that were unknown when this message was parsed.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means "not yet computed". Safe because the message is immutable.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirror of writeTo(): only non-default / present fields contribute bytes.
    if (increment_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(1, increment_);
    }
    if (!startValue_.isEmpty()) {
      size += com.google.protobuf.CodedOutputStream.computeBytesSize(2, startValue_);
    }
    if (!maxValue_.isEmpty()) {
      size += com.google.protobuf.CodedOutputStream.computeBytesSize(3, maxValue_);
    }
    if (!minValue_.isEmpty()) {
      size += com.google.protobuf.CodedOutputStream.computeBytesSize(4, minValue_);
    }
    if (cycle_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(5, cycle_);
    }
    if (cache_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(6, cache_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(7, getCustomFeatures());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.clouddms.v1.SequenceEntity)) {
      return super.equals(obj);
    }
    com.google.cloud.clouddms.v1.SequenceEntity other =
        (com.google.cloud.clouddms.v1.SequenceEntity) obj;
    // Field-by-field comparison; the optional message field is compared by value
    // only when both sides agree that it is present.
    if (getIncrement() != other.getIncrement()) return false;
    if (!getStartValue().equals(other.getStartValue())) return false;
    if (!getMaxValue().equals(other.getMaxValue())) return false;
    if (!getMinValue().equals(other.getMinValue())) return false;
    if (getCycle() != other.getCycle()) return false;
    if (getCache() != other.getCache()) return false;
    if (hasCustomFeatures() != other.hasCustomFeatures()) return false;
    if (hasCustomFeatures()) {
      if (!getCustomFeatures().equals(other.getCustomFeatures())) return false;
    }
    // Unknown fields participate in equality as well.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Each field contributes its field number and its value hash, keeping the
    // result consistent with equals().
    hash = (37 * hash) + INCREMENT_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getIncrement());
    hash = (37 * hash) + START_VALUE_FIELD_NUMBER;
    hash = (53 * hash) + getStartValue().hashCode();
    hash = (37 * hash) + MAX_VALUE_FIELD_NUMBER;
    hash = (53 * hash) + getMaxValue().hashCode();
    hash = (37 * hash) + MIN_VALUE_FIELD_NUMBER;
    hash = (53 * hash) + getMinValue().hashCode();
    hash = (37 * hash) + CYCLE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getCycle());
    hash = (37 * hash) + CACHE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getCache());
    // Only present message fields are mixed in, matching equals().
    if (hasCustomFeatures()) {
      hash = (37 * hash) + CUSTOM_FEATURES_FIELD_NUMBER;
      hash = (53 * hash) + getCustomFeatures().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.clouddms.v1.SequenceEntity parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.clouddms.v1.SequenceEntity parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.clouddms.v1.SequenceEntity parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.clouddms.v1.SequenceEntity parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.clouddms.v1.SequenceEntity parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.clouddms.v1.SequenceEntity parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.clouddms.v1.SequenceEntity parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.clouddms.v1.SequenceEntity parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.clouddms.v1.SequenceEntity parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.clouddms.v1.SequenceEntity parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.clouddms.v1.SequenceEntity parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.clouddms.v1.SequenceEntity parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.clouddms.v1.SequenceEntity prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Sequence's parent is a schema.
* </pre>
*
* Protobuf type {@code google.cloud.clouddms.v1.SequenceEntity}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.clouddms.v1.SequenceEntity)
com.google.cloud.clouddms.v1.SequenceEntityOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.clouddms.v1.ConversionWorkspaceResourcesProto
.internal_static_google_cloud_clouddms_v1_SequenceEntity_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.clouddms.v1.ConversionWorkspaceResourcesProto
.internal_static_google_cloud_clouddms_v1_SequenceEntity_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.clouddms.v1.SequenceEntity.class,
com.google.cloud.clouddms.v1.SequenceEntity.Builder.class);
}
// Construct using com.google.cloud.clouddms.v1.SequenceEntity.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getCustomFeaturesFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
increment_ = 0L;
startValue_ = com.google.protobuf.ByteString.EMPTY;
maxValue_ = com.google.protobuf.ByteString.EMPTY;
minValue_ = com.google.protobuf.ByteString.EMPTY;
cycle_ = false;
cache_ = 0L;
customFeatures_ = null;
if (customFeaturesBuilder_ != null) {
customFeaturesBuilder_.dispose();
customFeaturesBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.clouddms.v1.ConversionWorkspaceResourcesProto
.internal_static_google_cloud_clouddms_v1_SequenceEntity_descriptor;
}
@java.lang.Override
public com.google.cloud.clouddms.v1.SequenceEntity getDefaultInstanceForType() {
return com.google.cloud.clouddms.v1.SequenceEntity.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.clouddms.v1.SequenceEntity build() {
com.google.cloud.clouddms.v1.SequenceEntity result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.clouddms.v1.SequenceEntity buildPartial() {
com.google.cloud.clouddms.v1.SequenceEntity result =
new com.google.cloud.clouddms.v1.SequenceEntity(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies every builder field whose "has been set" bit is on into the result.
    // The builder tracks all seven fields (bits 0x01..0x40); the built message
    // only needs a presence bit for the custom_features message field, which is
    // remapped to bit 0x01 of result.bitField0_.
    private void buildPartial0(com.google.cloud.clouddms.v1.SequenceEntity result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.increment_ = increment_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.startValue_ = startValue_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.maxValue_ = maxValue_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.minValue_ = minValue_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.cycle_ = cycle_;
      }
      if (((from_bitField0_ & 0x00000020) != 0)) {
        result.cache_ = cache_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000040) != 0)) {
        // Prefer the nested builder's state when one has been created.
        result.customFeatures_ =
            customFeaturesBuilder_ == null ? customFeatures_ : customFeaturesBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.clouddms.v1.SequenceEntity) {
return mergeFrom((com.google.cloud.clouddms.v1.SequenceEntity) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges a message instance into this builder. Scalar fields are copied only
    // when they hold a non-default value, and the optional custom_features
    // message is merged (not replaced), matching standard protobuf merge rules.
    public Builder mergeFrom(com.google.cloud.clouddms.v1.SequenceEntity other) {
      if (other == com.google.cloud.clouddms.v1.SequenceEntity.getDefaultInstance()) return this;
      if (other.getIncrement() != 0L) {
        setIncrement(other.getIncrement());
      }
      if (other.getStartValue() != com.google.protobuf.ByteString.EMPTY) {
        setStartValue(other.getStartValue());
      }
      if (other.getMaxValue() != com.google.protobuf.ByteString.EMPTY) {
        setMaxValue(other.getMaxValue());
      }
      if (other.getMinValue() != com.google.protobuf.ByteString.EMPTY) {
        setMinValue(other.getMinValue());
      }
      if (other.getCycle() != false) {
        setCycle(other.getCycle());
      }
      if (other.getCache() != 0L) {
        setCache(other.getCache());
      }
      if (other.hasCustomFeatures()) {
        mergeCustomFeatures(other.getCustomFeatures());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        // Tag-dispatch parse loop. Each wire tag is (field_number << 3) | wire_type,
        // so e.g. field 1 as a varint is tag 8 and field 2 as length-delimited is 18.
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 8:
              {
                increment_ = input.readInt64();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 18:
              {
                startValue_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                maxValue_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                minValue_ = input.readBytes();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            case 40:
              {
                cycle_ = input.readBool();
                bitField0_ |= 0x00000010;
                break;
              } // case 40
            case 48:
              {
                cache_ = input.readInt64();
                bitField0_ |= 0x00000020;
                break;
              } // case 48
            case 58:
              {
                // Sub-message: parse directly into the nested Struct builder.
                input.readMessage(getCustomFeaturesFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000040;
                break;
              } // case 58
            default:
              {
                // Unrecognized tags are preserved as unknown fields; a false
                // return means an end-group tag terminated the message.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure so partially-merged state is observed.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private long increment_;
/**
*
*
* <pre>
* Increment value for the sequence.
* </pre>
*
* <code>int64 increment = 1;</code>
*
* @return The increment.
*/
@java.lang.Override
public long getIncrement() {
return increment_;
}
/**
*
*
* <pre>
* Increment value for the sequence.
* </pre>
*
* <code>int64 increment = 1;</code>
*
* @param value The increment to set.
* @return This builder for chaining.
*/
public Builder setIncrement(long value) {
increment_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Increment value for the sequence.
* </pre>
*
* <code>int64 increment = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearIncrement() {
bitField0_ = (bitField0_ & ~0x00000001);
increment_ = 0L;
onChanged();
return this;
}
private com.google.protobuf.ByteString startValue_ = com.google.protobuf.ByteString.EMPTY;
/**
*
*
* <pre>
     * Start number for the sequence, represented as bytes to accommodate large
     * numbers.
* </pre>
*
* <code>bytes start_value = 2;</code>
*
* @return The startValue.
*/
@java.lang.Override
public com.google.protobuf.ByteString getStartValue() {
return startValue_;
}
/**
*
*
* <pre>
     * Start number for the sequence, represented as bytes to accommodate large
     * numbers.
* </pre>
*
* <code>bytes start_value = 2;</code>
*
* @param value The startValue to set.
* @return This builder for chaining.
*/
public Builder setStartValue(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
startValue_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
     * Start number for the sequence, represented as bytes to accommodate large
     * numbers.
* </pre>
*
* <code>bytes start_value = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearStartValue() {
bitField0_ = (bitField0_ & ~0x00000002);
startValue_ = getDefaultInstance().getStartValue();
onChanged();
return this;
}
private com.google.protobuf.ByteString maxValue_ = com.google.protobuf.ByteString.EMPTY;
/**
*
*
* <pre>
     * Maximum number for the sequence, represented as bytes to accommodate large
     * numbers.
* </pre>
*
* <code>bytes max_value = 3;</code>
*
* @return The maxValue.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMaxValue() {
return maxValue_;
}
/**
*
*
* <pre>
     * Maximum number for the sequence, represented as bytes to accommodate large
     * numbers.
* </pre>
*
* <code>bytes max_value = 3;</code>
*
* @param value The maxValue to set.
* @return This builder for chaining.
*/
public Builder setMaxValue(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
maxValue_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
     * Maximum number for the sequence, represented as bytes to accommodate large
     * numbers.
* </pre>
*
* <code>bytes max_value = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearMaxValue() {
bitField0_ = (bitField0_ & ~0x00000004);
maxValue_ = getDefaultInstance().getMaxValue();
onChanged();
return this;
}
private com.google.protobuf.ByteString minValue_ = com.google.protobuf.ByteString.EMPTY;
/**
*
*
* <pre>
     * Minimum number for the sequence, represented as bytes to accommodate large
     * numbers.
* </pre>
*
* <code>bytes min_value = 4;</code>
*
* @return The minValue.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMinValue() {
return minValue_;
}
/**
*
*
* <pre>
     * Minimum number for the sequence, represented as bytes to accommodate large
     * numbers.
* </pre>
*
* <code>bytes min_value = 4;</code>
*
* @param value The minValue to set.
* @return This builder for chaining.
*/
public Builder setMinValue(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
minValue_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
     * Minimum number for the sequence, represented as bytes to accommodate large
     * numbers.
* </pre>
*
* <code>bytes min_value = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearMinValue() {
bitField0_ = (bitField0_ & ~0x00000008);
minValue_ = getDefaultInstance().getMinValue();
onChanged();
return this;
}
private boolean cycle_;
/**
*
*
* <pre>
* Indicates whether the sequence value should cycle through.
* </pre>
*
* <code>bool cycle = 5;</code>
*
* @return The cycle.
*/
@java.lang.Override
public boolean getCycle() {
return cycle_;
}
/**
*
*
* <pre>
* Indicates whether the sequence value should cycle through.
* </pre>
*
* <code>bool cycle = 5;</code>
*
* @param value The cycle to set.
* @return This builder for chaining.
*/
public Builder setCycle(boolean value) {
cycle_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates whether the sequence value should cycle through.
* </pre>
*
* <code>bool cycle = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearCycle() {
bitField0_ = (bitField0_ & ~0x00000010);
cycle_ = false;
onChanged();
return this;
}
private long cache_;
/**
*
*
* <pre>
* Indicates number of entries to cache / precreate.
* </pre>
*
* <code>int64 cache = 6;</code>
*
* @return The cache.
*/
@java.lang.Override
public long getCache() {
return cache_;
}
/**
*
*
* <pre>
* Indicates number of entries to cache / precreate.
* </pre>
*
* <code>int64 cache = 6;</code>
*
* @param value The cache to set.
* @return This builder for chaining.
*/
public Builder setCache(long value) {
cache_ = value;
bitField0_ |= 0x00000020;
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates number of entries to cache / precreate.
* </pre>
*
* <code>int64 cache = 6;</code>
*
* @return This builder for chaining.
*/
public Builder clearCache() {
bitField0_ = (bitField0_ & ~0x00000020);
cache_ = 0L;
onChanged();
return this;
}
private com.google.protobuf.Struct customFeatures_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder,
com.google.protobuf.StructOrBuilder>
customFeaturesBuilder_;
/**
*
*
* <pre>
* Custom engine specific features.
* </pre>
*
* <code>.google.protobuf.Struct custom_features = 7;</code>
*
* @return Whether the customFeatures field is set.
*/
public boolean hasCustomFeatures() {
return ((bitField0_ & 0x00000040) != 0);
}
/**
*
*
* <pre>
* Custom engine specific features.
* </pre>
*
* <code>.google.protobuf.Struct custom_features = 7;</code>
*
* @return The customFeatures.
*/
public com.google.protobuf.Struct getCustomFeatures() {
if (customFeaturesBuilder_ == null) {
return customFeatures_ == null
? com.google.protobuf.Struct.getDefaultInstance()
: customFeatures_;
} else {
return customFeaturesBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Custom engine specific features.
* </pre>
*
* <code>.google.protobuf.Struct custom_features = 7;</code>
*/
public Builder setCustomFeatures(com.google.protobuf.Struct value) {
if (customFeaturesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
customFeatures_ = value;
} else {
customFeaturesBuilder_.setMessage(value);
}
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
*
*
* <pre>
* Custom engine specific features.
* </pre>
*
* <code>.google.protobuf.Struct custom_features = 7;</code>
*/
public Builder setCustomFeatures(com.google.protobuf.Struct.Builder builderForValue) {
if (customFeaturesBuilder_ == null) {
customFeatures_ = builderForValue.build();
} else {
customFeaturesBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
*
*
* <pre>
* Custom engine specific features.
* </pre>
*
* <code>.google.protobuf.Struct custom_features = 7;</code>
*/
    public Builder mergeCustomFeatures(com.google.protobuf.Struct value) {
      if (customFeaturesBuilder_ == null) {
        // Merge into the existing value only when one is already present and
        // non-default; otherwise simply adopt the incoming value.
        if (((bitField0_ & 0x00000040) != 0)
            && customFeatures_ != null
            && customFeatures_ != com.google.protobuf.Struct.getDefaultInstance()) {
          getCustomFeaturesBuilder().mergeFrom(value);
        } else {
          customFeatures_ = value;
        }
      } else {
        customFeaturesBuilder_.mergeFrom(value);
      }
      // Mark the field present only if something was actually stored.
      if (customFeatures_ != null) {
        bitField0_ |= 0x00000040;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Custom engine specific features.
* </pre>
*
* <code>.google.protobuf.Struct custom_features = 7;</code>
*/
public Builder clearCustomFeatures() {
bitField0_ = (bitField0_ & ~0x00000040);
customFeatures_ = null;
if (customFeaturesBuilder_ != null) {
customFeaturesBuilder_.dispose();
customFeaturesBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Custom engine specific features.
* </pre>
*
* <code>.google.protobuf.Struct custom_features = 7;</code>
*/
public com.google.protobuf.Struct.Builder getCustomFeaturesBuilder() {
bitField0_ |= 0x00000040;
onChanged();
return getCustomFeaturesFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Custom engine specific features.
* </pre>
*
* <code>.google.protobuf.Struct custom_features = 7;</code>
*/
public com.google.protobuf.StructOrBuilder getCustomFeaturesOrBuilder() {
if (customFeaturesBuilder_ != null) {
return customFeaturesBuilder_.getMessageOrBuilder();
} else {
return customFeatures_ == null
? com.google.protobuf.Struct.getDefaultInstance()
: customFeatures_;
}
}
/**
*
*
* <pre>
* Custom engine specific features.
* </pre>
*
* <code>.google.protobuf.Struct custom_features = 7;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder,
com.google.protobuf.StructOrBuilder>
getCustomFeaturesFieldBuilder() {
if (customFeaturesBuilder_ == null) {
customFeaturesBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Struct,
com.google.protobuf.Struct.Builder,
com.google.protobuf.StructOrBuilder>(
getCustomFeatures(), getParentForChildren(), isClean());
customFeatures_ = null;
}
return customFeaturesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.clouddms.v1.SequenceEntity)
}
// @@protoc_insertion_point(class_scope:google.cloud.clouddms.v1.SequenceEntity)
private static final com.google.cloud.clouddms.v1.SequenceEntity DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.clouddms.v1.SequenceEntity();
}
public static com.google.cloud.clouddms.v1.SequenceEntity getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Singleton parser. On failure it attaches the partially-built message to the
  // thrown InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<SequenceEntity> PARSER =
      new com.google.protobuf.AbstractParser<SequenceEntity>() {
        @java.lang.Override
        public SequenceEntity parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors so the parser's contract (IPBE only) holds.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<SequenceEntity> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SequenceEntity> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.clouddms.v1.SequenceEntity getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 38,128 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/ListEntityTypesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/entity_type.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3beta1;
/**
*
*
* <pre>
* The request message for
* [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3beta1.EntityTypes.ListEntityTypes].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest}
*/
public final class ListEntityTypesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest)
ListEntityTypesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListEntityTypesRequest.newBuilder() to construct.
private ListEntityTypesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListEntityTypesRequest() {
parent_ = "";
languageCode_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListEntityTypesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.EntityTypeProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_ListEntityTypesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.EntityTypeProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_ListEntityTypesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest.class,
com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The agent to list all entity types for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The agent to list all entity types for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LANGUAGE_CODE_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object languageCode_ = "";
/**
*
*
* <pre>
* The language to list entity types for. The following fields are language
* dependent:
*
* * `EntityType.entities.value`
* * `EntityType.entities.synonyms`
* * `EntityType.excluded_phrases.value`
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
* are supported.
* Note: languages must be enabled in the agent before they can be used.
* </pre>
*
* <code>string language_code = 2;</code>
*
* @return The languageCode.
*/
@java.lang.Override
public java.lang.String getLanguageCode() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
languageCode_ = s;
return s;
}
}
/**
*
*
* <pre>
* The language to list entity types for. The following fields are language
* dependent:
*
* * `EntityType.entities.value`
* * `EntityType.entities.synonyms`
* * `EntityType.excluded_phrases.value`
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
* are supported.
* Note: languages must be enabled in the agent before they can be used.
* </pre>
*
* <code>string language_code = 2;</code>
*
* @return The bytes for languageCode.
*/
@java.lang.Override
public com.google.protobuf.ByteString getLanguageCodeBytes() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
languageCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of items to return in a single page. By default 100 and
* at most 1000.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, languageCode_);
}
if (pageSize_ != 0) {
output.writeInt32(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, languageCode_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest other =
(com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getLanguageCode().equals(other.getLanguageCode())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
hash = (53 * hash) + getLanguageCode().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [EntityTypes.ListEntityTypes][google.cloud.dialogflow.cx.v3beta1.EntityTypes.ListEntityTypes].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest)
com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.EntityTypeProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_ListEntityTypesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.EntityTypeProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_ListEntityTypesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest.class,
com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
languageCode_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.cx.v3beta1.EntityTypeProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_ListEntityTypesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest
getDefaultInstanceForType() {
return com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest build() {
com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest buildPartial() {
com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest result =
new com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.languageCode_ = languageCode_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.pageToken_ = pageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest) {
return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest other) {
if (other
== com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getLanguageCode().isEmpty()) {
languageCode_ = other.languageCode_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
languageCode_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The agent to list all entity types for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The agent to list all entity types for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The agent to list all entity types for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The agent to list all entity types for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The agent to list all entity types for.
* Format: `projects/<ProjectID>/locations/<LocationID>/agents/<AgentID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object languageCode_ = "";
/**
*
*
* <pre>
* The language to list entity types for. The following fields are language
* dependent:
*
* * `EntityType.entities.value`
* * `EntityType.entities.synonyms`
* * `EntityType.excluded_phrases.value`
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
* are supported.
* Note: languages must be enabled in the agent before they can be used.
* </pre>
*
* <code>string language_code = 2;</code>
*
* @return The languageCode.
*/
public java.lang.String getLanguageCode() {
java.lang.Object ref = languageCode_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
languageCode_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The language to list entity types for. The following fields are language
* dependent:
*
* * `EntityType.entities.value`
* * `EntityType.entities.synonyms`
* * `EntityType.excluded_phrases.value`
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
* are supported.
* Note: languages must be enabled in the agent before they can be used.
* </pre>
*
* <code>string language_code = 2;</code>
*
* @return The bytes for languageCode.
*/
public com.google.protobuf.ByteString getLanguageCodeBytes() {
java.lang.Object ref = languageCode_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
languageCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The language to list entity types for. The following fields are language
* dependent:
*
* * `EntityType.entities.value`
* * `EntityType.entities.synonyms`
* * `EntityType.excluded_phrases.value`
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
* are supported.
* Note: languages must be enabled in the agent before they can be used.
* </pre>
*
* <code>string language_code = 2;</code>
*
* @param value The languageCode to set.
* @return This builder for chaining.
*/
public Builder setLanguageCode(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
languageCode_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The language to list entity types for. The following fields are language
* dependent:
*
* * `EntityType.entities.value`
* * `EntityType.entities.synonyms`
* * `EntityType.excluded_phrases.value`
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
* are supported.
* Note: languages must be enabled in the agent before they can be used.
* </pre>
*
* <code>string language_code = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearLanguageCode() {
languageCode_ = getDefaultInstance().getLanguageCode();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The language to list entity types for. The following fields are language
* dependent:
*
* * `EntityType.entities.value`
* * `EntityType.entities.synonyms`
* * `EntityType.excluded_phrases.value`
*
* If not specified, the agent's default language is used.
* [Many
* languages](https://cloud.google.com/dialogflow/cx/docs/reference/language)
* are supported.
* Note: languages must be enabled in the agent before they can be used.
* </pre>
*
* <code>string language_code = 2;</code>
*
* @param value The bytes for languageCode to set.
* @return This builder for chaining.
*/
public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
languageCode_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of items to return in a single page. By default 100 and
* at most 1000.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* The maximum number of items to return in a single page. By default 100 and
* at most 1000.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of items to return in a single page. By default 100 and
* at most 1000.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest)
private static final com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest();
}
public static com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListEntityTypesRequest> PARSER =
new com.google.protobuf.AbstractParser<ListEntityTypesRequest>() {
@java.lang.Override
public ListEntityTypesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListEntityTypesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListEntityTypesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.ListEntityTypesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 38,177 | java-filestore/proto-google-cloud-filestore-v1beta1/src/main/java/com/google/cloud/filestore/v1beta1/CreateBackupRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/filestore/v1beta1/cloud_filestore_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.filestore.v1beta1;
/**
 *
 *
 * <pre>
 * CreateBackupRequest creates a backup.
 * </pre>
 *
 * Protobuf type {@code google.cloud.filestore.v1beta1.CreateBackupRequest}
 */
public final class CreateBackupRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.filestore.v1beta1.CreateBackupRequest)
    CreateBackupRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CreateBackupRequest.newBuilder() to construct.
  private CreateBackupRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: string fields start empty; the backup message stays unset.
  private CreateBackupRequest() {
    parent_ = "";
    backupId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateBackupRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
        .internal_static_google_cloud_filestore_v1beta1_CreateBackupRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
        .internal_static_google_cloud_filestore_v1beta1_CreateBackupRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.filestore.v1beta1.CreateBackupRequest.class,
            com.google.cloud.filestore.v1beta1.CreateBackupRequest.Builder.class);
  }
  // Presence bits; bit 0x00000001 records whether the optional backup message field is set.
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The backup's project and location, in the format
   * `projects/{project_id}/locations/{location}`. In Filestore,
   * backup locations map to Google Cloud regions, for example **us-west1**.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field was parsed as bytes; decode once and cache the String form for later calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The backup's project and location, in the format
   * `projects/{project_id}/locations/{location}`. In Filestore,
   * backup locations map to Google Cloud regions, for example **us-west1**.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int BACKUP_FIELD_NUMBER = 2;
  private com.google.cloud.filestore.v1beta1.Backup backup_;
  /**
   *
   *
   * <pre>
   * Required. A [backup resource][google.cloud.filestore.v1beta1.Backup]
   * </pre>
   *
   * <code>
   * .google.cloud.filestore.v1beta1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the backup field is set.
   */
  @java.lang.Override
  public boolean hasBackup() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. A [backup resource][google.cloud.filestore.v1beta1.Backup]
   * </pre>
   *
   * <code>
   * .google.cloud.filestore.v1beta1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The backup.
   */
  @java.lang.Override
  public com.google.cloud.filestore.v1beta1.Backup getBackup() {
    // Never returns null: falls back to the Backup default instance when unset.
    return backup_ == null
        ? com.google.cloud.filestore.v1beta1.Backup.getDefaultInstance()
        : backup_;
  }
  /**
   *
   *
   * <pre>
   * Required. A [backup resource][google.cloud.filestore.v1beta1.Backup]
   * </pre>
   *
   * <code>
   * .google.cloud.filestore.v1beta1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.filestore.v1beta1.BackupOrBuilder getBackupOrBuilder() {
    return backup_ == null
        ? com.google.cloud.filestore.v1beta1.Backup.getDefaultInstance()
        : backup_;
  }
  public static final int BACKUP_ID_FIELD_NUMBER = 3;
  @SuppressWarnings("serial")
  private volatile java.lang.Object backupId_ = "";
  /**
   *
   *
   * <pre>
   * Required. The ID to use for the backup.
   * The ID must be unique within the specified project and location.
   *
   * This value must start with a lowercase letter followed by up to 62
   * lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
   * </pre>
   *
   * <code>string backup_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The backupId.
   */
  @java.lang.Override
  public java.lang.String getBackupId() {
    java.lang.Object ref = backupId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      backupId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The ID to use for the backup.
   * The ID must be unique within the specified project and location.
   *
   * This value must start with a lowercase letter followed by up to 62
   * lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
   * </pre>
   *
   * <code>string backup_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for backupId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getBackupIdBytes() {
    java.lang.Object ref = backupId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      backupId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized isInitialized result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 semantics: strings are emitted only when non-empty; the backup message only
    // when its presence bit is set. Unknown fields are preserved and re-serialized last.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getBackup());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(backupId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, backupId_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized in the inherited memoizedSize field; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getBackup());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(backupId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, backupId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.filestore.v1beta1.CreateBackupRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.filestore.v1beta1.CreateBackupRequest other =
        (com.google.cloud.filestore.v1beta1.CreateBackupRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (hasBackup() != other.hasBackup()) return false;
    if (hasBackup()) {
      if (!getBackup().equals(other.getBackup())) return false;
    }
    if (!getBackupId().equals(other.getBackupId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Mirrors equals(): mixes the descriptor, each present field, and unknown fields.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasBackup()) {
      hash = (37 * hash) + BACKUP_FIELD_NUMBER;
      hash = (53 * hash) + getBackup().hashCode();
    }
    hash = (37 * hash) + BACKUP_ID_FIELD_NUMBER;
    hash = (53 * hash) + getBackupId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.filestore.v1beta1.CreateBackupRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.filestore.v1beta1.CreateBackupRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.filestore.v1beta1.CreateBackupRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.filestore.v1beta1.CreateBackupRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.filestore.v1beta1.CreateBackupRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.filestore.v1beta1.CreateBackupRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.filestore.v1beta1.CreateBackupRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.filestore.v1beta1.CreateBackupRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.filestore.v1beta1.CreateBackupRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.filestore.v1beta1.CreateBackupRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.filestore.v1beta1.CreateBackupRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.filestore.v1beta1.CreateBackupRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.filestore.v1beta1.CreateBackupRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * CreateBackupRequest creates a backup.
   * </pre>
   *
   * Protobuf type {@code google.cloud.filestore.v1beta1.CreateBackupRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.filestore.v1beta1.CreateBackupRequest)
      com.google.cloud.filestore.v1beta1.CreateBackupRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
          .internal_static_google_cloud_filestore_v1beta1_CreateBackupRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
          .internal_static_google_cloud_filestore_v1beta1_CreateBackupRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.filestore.v1beta1.CreateBackupRequest.class,
              com.google.cloud.filestore.v1beta1.CreateBackupRequest.Builder.class);
    }
    // Construct using com.google.cloud.filestore.v1beta1.CreateBackupRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getBackupFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      backup_ = null;
      if (backupBuilder_ != null) {
        backupBuilder_.dispose();
        backupBuilder_ = null;
      }
      backupId_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
          .internal_static_google_cloud_filestore_v1beta1_CreateBackupRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.filestore.v1beta1.CreateBackupRequest getDefaultInstanceForType() {
      return com.google.cloud.filestore.v1beta1.CreateBackupRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.filestore.v1beta1.CreateBackupRequest build() {
      com.google.cloud.filestore.v1beta1.CreateBackupRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.filestore.v1beta1.CreateBackupRequest buildPartial() {
      com.google.cloud.filestore.v1beta1.CreateBackupRequest result =
          new com.google.cloud.filestore.v1beta1.CreateBackupRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only fields the builder has touched; builder bit 0x2 (backup) maps to
    // message presence bit 0x1.
    private void buildPartial0(com.google.cloud.filestore.v1beta1.CreateBackupRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.backup_ = backupBuilder_ == null ? backup_ : backupBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.backupId_ = backupId_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.filestore.v1beta1.CreateBackupRequest) {
        return mergeFrom((com.google.cloud.filestore.v1beta1.CreateBackupRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.filestore.v1beta1.CreateBackupRequest other) {
      if (other == com.google.cloud.filestore.v1beta1.CreateBackupRequest.getDefaultInstance())
        return this;
      // Standard proto merge: non-empty scalars overwrite, set messages recursively merge.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasBackup()) {
        mergeBackup(other.getBackup());
      }
      if (!other.getBackupId().isEmpty()) {
        backupId_ = other.backupId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getBackupFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                backupId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder presence bits: 0x1 = parent, 0x2 = backup, 0x4 = backupId.
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The backup's project and location, in the format
     * `projects/{project_id}/locations/{location}`. In Filestore,
     * backup locations map to Google Cloud regions, for example **us-west1**.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The backup's project and location, in the format
     * `projects/{project_id}/locations/{location}`. In Filestore,
     * backup locations map to Google Cloud regions, for example **us-west1**.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The backup's project and location, in the format
     * `projects/{project_id}/locations/{location}`. In Filestore,
     * backup locations map to Google Cloud regions, for example **us-west1**.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The backup's project and location, in the format
     * `projects/{project_id}/locations/{location}`. In Filestore,
     * backup locations map to Google Cloud regions, for example **us-west1**.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The backup's project and location, in the format
     * `projects/{project_id}/locations/{location}`. In Filestore,
     * backup locations map to Google Cloud regions, for example **us-west1**.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private com.google.cloud.filestore.v1beta1.Backup backup_;
    // Lazily-created nested builder for the backup field; when non-null it owns the value.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.filestore.v1beta1.Backup,
            com.google.cloud.filestore.v1beta1.Backup.Builder,
            com.google.cloud.filestore.v1beta1.BackupOrBuilder>
        backupBuilder_;
    /**
     *
     *
     * <pre>
     * Required. A [backup resource][google.cloud.filestore.v1beta1.Backup]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1beta1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the backup field is set.
     */
    public boolean hasBackup() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. A [backup resource][google.cloud.filestore.v1beta1.Backup]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1beta1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The backup.
     */
    public com.google.cloud.filestore.v1beta1.Backup getBackup() {
      if (backupBuilder_ == null) {
        return backup_ == null
            ? com.google.cloud.filestore.v1beta1.Backup.getDefaultInstance()
            : backup_;
      } else {
        return backupBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. A [backup resource][google.cloud.filestore.v1beta1.Backup]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1beta1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setBackup(com.google.cloud.filestore.v1beta1.Backup value) {
      if (backupBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        backup_ = value;
      } else {
        backupBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A [backup resource][google.cloud.filestore.v1beta1.Backup]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1beta1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setBackup(com.google.cloud.filestore.v1beta1.Backup.Builder builderForValue) {
      if (backupBuilder_ == null) {
        backup_ = builderForValue.build();
      } else {
        backupBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A [backup resource][google.cloud.filestore.v1beta1.Backup]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1beta1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeBackup(com.google.cloud.filestore.v1beta1.Backup value) {
      // If a non-default backup is already present, merge field-by-field; otherwise adopt value.
      if (backupBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && backup_ != null
            && backup_ != com.google.cloud.filestore.v1beta1.Backup.getDefaultInstance()) {
          getBackupBuilder().mergeFrom(value);
        } else {
          backup_ = value;
        }
      } else {
        backupBuilder_.mergeFrom(value);
      }
      if (backup_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A [backup resource][google.cloud.filestore.v1beta1.Backup]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1beta1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearBackup() {
      bitField0_ = (bitField0_ & ~0x00000002);
      backup_ = null;
      if (backupBuilder_ != null) {
        backupBuilder_.dispose();
        backupBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. A [backup resource][google.cloud.filestore.v1beta1.Backup]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1beta1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.filestore.v1beta1.Backup.Builder getBackupBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getBackupFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. A [backup resource][google.cloud.filestore.v1beta1.Backup]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1beta1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.filestore.v1beta1.BackupOrBuilder getBackupOrBuilder() {
      if (backupBuilder_ != null) {
        return backupBuilder_.getMessageOrBuilder();
      } else {
        return backup_ == null
            ? com.google.cloud.filestore.v1beta1.Backup.getDefaultInstance()
            : backup_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. A [backup resource][google.cloud.filestore.v1beta1.Backup]
     * </pre>
     *
     * <code>
     * .google.cloud.filestore.v1beta1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.filestore.v1beta1.Backup,
            com.google.cloud.filestore.v1beta1.Backup.Builder,
            com.google.cloud.filestore.v1beta1.BackupOrBuilder>
        getBackupFieldBuilder() {
      if (backupBuilder_ == null) {
        // Ownership transfers to the field builder; clear the plain field to avoid double state.
        backupBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.filestore.v1beta1.Backup,
                com.google.cloud.filestore.v1beta1.Backup.Builder,
                com.google.cloud.filestore.v1beta1.BackupOrBuilder>(
                getBackup(), getParentForChildren(), isClean());
        backup_ = null;
      }
      return backupBuilder_;
    }
    private java.lang.Object backupId_ = "";
    /**
     *
     *
     * <pre>
     * Required. The ID to use for the backup.
     * The ID must be unique within the specified project and location.
     *
     * This value must start with a lowercase letter followed by up to 62
     * lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
     * </pre>
     *
     * <code>string backup_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The backupId.
     */
    public java.lang.String getBackupId() {
      java.lang.Object ref = backupId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        backupId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The ID to use for the backup.
     * The ID must be unique within the specified project and location.
     *
     * This value must start with a lowercase letter followed by up to 62
     * lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
     * </pre>
     *
     * <code>string backup_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for backupId.
     */
    public com.google.protobuf.ByteString getBackupIdBytes() {
      java.lang.Object ref = backupId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        backupId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The ID to use for the backup.
     * The ID must be unique within the specified project and location.
     *
     * This value must start with a lowercase letter followed by up to 62
     * lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
     * </pre>
     *
     * <code>string backup_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The backupId to set.
     * @return This builder for chaining.
     */
    public Builder setBackupId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      backupId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The ID to use for the backup.
     * The ID must be unique within the specified project and location.
     *
     * This value must start with a lowercase letter followed by up to 62
     * lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
     * </pre>
     *
     * <code>string backup_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearBackupId() {
      backupId_ = getDefaultInstance().getBackupId();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The ID to use for the backup.
     * The ID must be unique within the specified project and location.
     *
     * This value must start with a lowercase letter followed by up to 62
     * lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
     * </pre>
     *
     * <code>string backup_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for backupId to set.
     * @return This builder for chaining.
     */
    public Builder setBackupIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      backupId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.filestore.v1beta1.CreateBackupRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.filestore.v1beta1.CreateBackupRequest)
  private static final com.google.cloud.filestore.v1beta1.CreateBackupRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.filestore.v1beta1.CreateBackupRequest();
  }
  public static com.google.cloud.filestore.v1beta1.CreateBackupRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stream parser backing the parseFrom(...) overloads; attaches the partially-parsed
  // message to any parse failure so callers can inspect what was read.
  private static final com.google.protobuf.Parser<CreateBackupRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateBackupRequest>() {
        @java.lang.Override
        public CreateBackupRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CreateBackupRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateBackupRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.filestore.v1beta1.CreateBackupRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/ofbiz-framework | 38,261 | applications/order/src/main/java/org/apache/ofbiz/order/shoppinglist/ShoppingListEvents.java | /*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.order.shoppinglist;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import jakarta.servlet.http.Cookie;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.servlet.http.HttpSession;
import org.apache.ofbiz.base.util.Debug;
import org.apache.ofbiz.base.util.GeneralException;
import org.apache.ofbiz.base.util.UtilDateTime;
import org.apache.ofbiz.base.util.UtilHttp;
import org.apache.ofbiz.base.util.UtilMisc;
import org.apache.ofbiz.base.util.UtilProperties;
import org.apache.ofbiz.base.util.UtilValidate;
import org.apache.ofbiz.entity.Delegator;
import org.apache.ofbiz.entity.GenericEntityException;
import org.apache.ofbiz.entity.GenericValue;
import org.apache.ofbiz.entity.util.EntityQuery;
import org.apache.ofbiz.entity.util.EntityUtil;
import org.apache.ofbiz.order.shoppingcart.CartItemModifyException;
import org.apache.ofbiz.order.shoppingcart.ItemNotFoundException;
import org.apache.ofbiz.order.shoppingcart.ShoppingCart;
import org.apache.ofbiz.order.shoppingcart.ShoppingCartEvents;
import org.apache.ofbiz.order.shoppingcart.ShoppingCartItem;
import org.apache.ofbiz.product.catalog.CatalogWorker;
import org.apache.ofbiz.product.config.ProductConfigWorker;
import org.apache.ofbiz.product.config.ProductConfigWrapper;
import org.apache.ofbiz.product.store.ProductStoreWorker;
import org.apache.ofbiz.service.GenericServiceException;
import org.apache.ofbiz.service.LocalDispatcher;
import org.apache.ofbiz.service.ServiceUtil;
import org.apache.ofbiz.webapp.website.WebSiteWorker;
/**
* Shopping cart events.
*/
public class ShoppingListEvents {
private static final String PERSISTANT_LIST_NAME = "auto-save";
private static final String MODULE = ShoppingListEvents.class.getName();
private static final String RES_ERROR = "OrderErrorUiLabels";
public static String addBulkFromCart(HttpServletRequest request, HttpServletResponse response) {
Delegator delegator = (Delegator) request.getAttribute("delegator");
LocalDispatcher dispatcher = (LocalDispatcher) request.getAttribute("dispatcher");
ShoppingCart cart = ShoppingCartEvents.getCartObject(request);
GenericValue userLogin = (GenericValue) request.getSession().getAttribute("userLogin");
String shoppingListId = request.getParameter("shoppingListId");
String shoppingListTypeId = request.getParameter("shoppingListTypeId");
String selectedCartItems[] = request.getParameterValues("selectedItem");
if (UtilValidate.isEmpty(selectedCartItems)) {
selectedCartItems = makeCartItemsArray(cart);
}
try {
shoppingListId = addBulkFromCart(delegator, dispatcher, cart, userLogin, shoppingListId, shoppingListTypeId, selectedCartItems, true,
true);
} catch (IllegalArgumentException e) {
request.setAttribute("_ERROR_MESSAGE_", e.getMessage());
return "error";
}
request.setAttribute("shoppingListId", shoppingListId);
return "success";
}
public static String addBulkFromCart(Delegator delegator, LocalDispatcher dispatcher, ShoppingCart cart, GenericValue userLogin,
String shoppingListId, String shoppingListTypeId, String[] items, boolean allowPromo, boolean append)
throws IllegalArgumentException {
String errMsg = null;
if (items == null || items.length == 0) {
errMsg = UtilProperties.getMessage(RES_ERROR, "shoppinglistevents.select_items_to_add_to_list", cart.getLocale());
throw new IllegalArgumentException(errMsg);
}
if (UtilValidate.isEmpty(shoppingListId)) {
// create a new shopping list
Map<String, Object> newListResult = null;
try {
newListResult = dispatcher.runSync("createShoppingList", UtilMisc.<String, Object>toMap("userLogin", userLogin,
"productStoreId", cart.getProductStoreId(), "partyId", cart.getOrderPartyId(),
"shoppingListTypeId", shoppingListTypeId, "currencyUom", cart.getCurrency()),
90, true);
if (ServiceUtil.isError(newListResult)) {
String errorMessage = ServiceUtil.getErrorMessage(newListResult);
Debug.logError(errorMessage, MODULE);
throw new IllegalArgumentException(errorMessage);
}
} catch (GenericServiceException e) {
Debug.logError(e, "Problems creating new ShoppingList", MODULE);
errMsg = UtilProperties.getMessage(RES_ERROR, "shoppinglistevents.cannot_create_new_shopping_list", cart.getLocale());
throw new IllegalArgumentException(errMsg);
}
// get the new list id
if (newListResult != null) {
shoppingListId = (String) newListResult.get("shoppingListId");
}
// if no list was created throw an error
if (shoppingListId == null || "".equals(shoppingListId)) {
errMsg = UtilProperties.getMessage(RES_ERROR, "shoppinglistevents.shoppingListId_is_required_parameter", cart.getLocale());
throw new IllegalArgumentException(errMsg);
}
} else if (!append) {
try {
clearListInfo(delegator, shoppingListId);
} catch (GenericEntityException e) {
Debug.logError(e, MODULE);
throw new IllegalArgumentException("Could not clear current shopping list: " + e.toString());
}
}
for (String item2 : items) {
Integer cartIdInt = null;
try {
cartIdInt = Integer.valueOf(item2);
} catch (Exception e) {
Debug.logWarning(e, UtilProperties.getMessage(RES_ERROR, "OrderIllegalCharacterInSelectedItemField", cart.getLocale()), MODULE);
}
if (cartIdInt != null) {
ShoppingCartItem item = cart.findCartItem(cartIdInt);
if (allowPromo || !item.getIsPromo()) {
Debug.logInfo("Adding cart item to shopping list [" + shoppingListId + "], allowPromo=" + allowPromo + ", item.getIsPromo()="
+ item.getIsPromo() + ", item.getProductId()=" + item.getProductId() + ", item.getQuantity()=" + item.getQuantity(),
MODULE);
Map<String, String> itemAttributes = item.getOrderItemAttributes();
Map<String, Object> serviceResult = null;
try {
Map<String, Object> ctx = UtilMisc.<String, Object>toMap("userLogin", userLogin,
"shoppingListId", shoppingListId,
"productId", item.getProductId(), "quantity", item.getQuantity());
ctx.put("shoppingListItemAttributes", itemAttributes);
ctx.put("reservStart", item.getReservStart());
ctx.put("reservLength", item.getReservLength());
ctx.put("reservPersons", item.getReservPersons());
if (item.getConfigWrapper() != null) {
ctx.put("configId", item.getConfigWrapper().getConfigId());
}
serviceResult = dispatcher.runSync("createShoppingListItem", ctx);
if (ServiceUtil.isError(serviceResult)) {
String errorMessage = ServiceUtil.getErrorMessage(serviceResult);
Debug.logError(errorMessage, MODULE);
throw new IllegalArgumentException(errorMessage);
}
} catch (GenericServiceException e) {
Debug.logError(e, "Problems creating ShoppingList item entity", MODULE);
errMsg = UtilProperties.getMessage(RES_ERROR, "shoppinglistevents.error_adding_item_to_shopping_list", cart.getLocale());
throw new IllegalArgumentException(errMsg);
}
// store all currenlty existing OrderItemAttributes as ShoppingListItemAttributes
if (UtilValidate.isNotEmpty(itemAttributes)) {
for (Map.Entry<String, String> itemAttrib : itemAttributes.entrySet()) {
try {
GenericValue sliAttrib = delegator.makeValue("ShoppingListItemAttribute", UtilMisc
.toMap("shoppingListId", shoppingListId,
"shoppingListItemSeqId", serviceResult.get("shoppingListItemSeqId"),
"attrName", itemAttrib.getKey(), "attrValue", itemAttrib.getValue()));
delegator.createOrStore(sliAttrib);
} catch (GenericEntityException e) {
Debug.logError(e, "Problems creating ShoppingListItemAttribute entity", MODULE);
errMsg = UtilProperties.getMessage(RES_ERROR,
"shoppinglistevents.error_adding_item_to_shopping_list", cart.getLocale());
throw new IllegalArgumentException(errMsg);
}
}
}
}
}
}
// return the shoppinglist id
return shoppingListId;
}
public static String addListToCart(HttpServletRequest request, HttpServletResponse response) {
Delegator delegator = (Delegator) request.getAttribute("delegator");
LocalDispatcher dispatcher = (LocalDispatcher) request.getAttribute("dispatcher");
ShoppingCart cart = ShoppingCartEvents.getCartObject(request);
String shoppingListId = request.getParameter("shoppingListId");
String includeChild = request.getParameter("includeChild");
String prodCatalogId = CatalogWorker.getCurrentCatalogId(request);
try {
addListToCart(delegator, dispatcher, cart, prodCatalogId, shoppingListId, (includeChild != null), true, true);
} catch (IllegalArgumentException e) {
request.setAttribute("_ERROR_MESSAGE_", e.getMessage());
return "error";
}
return "success";
}
    /**
     * Adds every item of the given shopping list to the shopping cart, optionally including
     * the items of all its child lists.
     * @param delegator the delegator
     * @param dispatcher the service dispatcher
     * @param cart the cart that receives the items
     * @param prodCatalogId the catalog the items are added under
     * @param shoppingListId the list whose items are loaded; required
     * @param includeChild when true, items of all child shopping lists are added as well
     * @param setAsListItem when true, each cart item is tagged with its originating list/item ids
     * @param append when false, the cart is cleared before loading the list
     * @return a newline-separated per-item status message, or "" when there is nothing to report
     * @throws IllegalArgumentException when the list id is missing or the list cannot be read
     */
    public static String addListToCart(Delegator delegator, LocalDispatcher dispatcher, ShoppingCart cart, String prodCatalogId,
            String shoppingListId, boolean includeChild, boolean setAsListItem, boolean append)
            throws java.lang.IllegalArgumentException {
        String errMsg = null;
        // no list; no add
        if (shoppingListId == null) {
            errMsg = UtilProperties.getMessage(RES_ERROR, "shoppinglistevents.choose_shopping_list", cart.getLocale());
            throw new IllegalArgumentException(errMsg);
        }
        // get the shopping list
        GenericValue shoppingList = null;
        List<GenericValue> shoppingListItems = null;
        try {
            shoppingList = EntityQuery.use(delegator).from("ShoppingList").where("shoppingListId", shoppingListId).queryOne();
            if (shoppingList == null) {
                errMsg = UtilProperties.getMessage(RES_ERROR, "shoppinglistevents.error_getting_shopping_list_and_items", cart.getLocale());
                throw new IllegalArgumentException(errMsg);
            }
            shoppingListItems = shoppingList.getRelated("ShoppingListItem", null, null, false);
            if (shoppingListItems == null) {
                shoppingListItems = new LinkedList<>();
            }
            // include all items of child lists if flagged to do so
            if (includeChild) {
                List<GenericValue> childShoppingLists = shoppingList.getRelated("ChildShoppingList", null, null, false);
                for (GenericValue v : childShoppingLists) {
                    List<GenericValue> items = v.getRelated("ShoppingListItem", null, null, false);
                    shoppingListItems.addAll(items);
                }
            }
        } catch (GenericEntityException e) {
            Debug.logError(e, "Problems getting ShoppingList and ShoppingListItem records", MODULE);
            errMsg = UtilProperties.getMessage(RES_ERROR, "shoppinglistevents.error_getting_shopping_list_and_items", cart.getLocale());
            throw new IllegalArgumentException(errMsg);
        }
        // no items; not an error; just mention that nothing was added
        if (UtilValidate.isEmpty(shoppingListItems)) {
            errMsg = UtilProperties.getMessage(RES_ERROR, "shoppinglistevents.no_items_added", cart.getLocale());
            return errMsg;
        }
        // check if we are to clear the cart first
        if (!append) {
            cart.clear();
            // Prevent the system from creating a new shopping list every time the cart is restored for anonymous user.
            cart.setAutoSaveListId(shoppingListId);
        }
        // get the survey info for all the items
        Map<String, List<String>> shoppingListSurveyInfo = getItemSurveyInfos(shoppingListItems);
        // get the itemAttributeInfos for all the items
        Map<String, Map<String, String>> itemAttributeInfos = getItemAttributeInfos(shoppingListItems);
        // add the items
        StringBuilder eventMessage = new StringBuilder();
        for (GenericValue shoppingListItem : shoppingListItems) {
            String productId = shoppingListItem.getString("productId");
            BigDecimal quantity = shoppingListItem.getBigDecimal("quantity");
            Timestamp reservStart = shoppingListItem.getTimestamp("reservStart");
            BigDecimal reservLength = shoppingListItem.getBigDecimal("reservLength");
            BigDecimal reservPersons = shoppingListItem.getBigDecimal("reservPersons");
            String configId = shoppingListItem.getString("configId");
            try {
                String listId = shoppingListItem.getString("shoppingListId");
                String itemId = shoppingListItem.getString("shoppingListItemSeqId");
                Map<String, Object> attributes = new HashMap<>();
                // list items are noted in the shopping cart
                if (setAsListItem) {
                    attributes.put("shoppingListId", listId);
                    attributes.put("shoppingListItemSeqId", itemId);
                }
                // check if we have existing survey responses to append
                if (shoppingListSurveyInfo.containsKey(listId + "." + itemId) && UtilValidate.isNotEmpty(shoppingListSurveyInfo.get(listId + "."
                        + itemId))) {
                    attributes.put("surveyResponses", shoppingListSurveyInfo.get(listId + "." + itemId));
                }
                ProductConfigWrapper configWrapper = null;
                if (UtilValidate.isNotEmpty(configId)) {
                    // rebuild the product configuration the item was saved with
                    configWrapper = ProductConfigWorker.loadProductConfigWrapper(delegator, dispatcher, configId, productId,
                            cart.getProductStoreId(), prodCatalogId, cart.getWebSiteId(), cart.getCurrency(), cart.getLocale(),
                            cart.getAutoUserLogin());
                }
                // TODO: add code to check for survey response requirement
                // add shoppingListItemAttributes as orderItemAttributes to cart item
                Map<String, String> orderItemAttributes = itemAttributeInfos.get(listId + "." + itemId);
                cart.addOrIncreaseItem(productId, null, quantity, reservStart, reservLength, reservPersons, null, null,
                        null, null, null, attributes, orderItemAttributes, prodCatalogId, configWrapper, null, null,
                        null, dispatcher);
                Map<String, Object> messageMap = UtilMisc.<String, Object>toMap("productId", productId);
                errMsg = UtilProperties.getMessage(RES_ERROR, "shoppinglistevents.added_product_to_cart", messageMap, cart.getLocale());
                eventMessage.append(errMsg).append("\n");
            } catch (CartItemModifyException e) {
                // per-item failure: record a message and keep going with the remaining items
                Debug.logWarning(e, UtilProperties.getMessage(RES_ERROR, "OrderProblemsAddingItemFromListToCart", cart.getLocale()));
                Map<String, Object> messageMap = UtilMisc.<String, Object>toMap("productId", productId);
                errMsg = UtilProperties.getMessage(RES_ERROR, "shoppinglistevents.problem_adding_product_to_cart", messageMap, cart.getLocale());
                eventMessage.append(errMsg).append("\n");
            } catch (ItemNotFoundException e) {
                Debug.logWarning(e, UtilProperties.getMessage(RES_ERROR, "OrderProductNotFound", cart.getLocale()));
                Map<String, Object> messageMap = UtilMisc.<String, Object>toMap("productId", productId);
                errMsg = UtilProperties.getMessage(RES_ERROR, "shoppinglistevents.problem_adding_product_to_cart", messageMap, cart.getLocale());
                eventMessage.append(errMsg).append("\n");
            }
        }
        if (eventMessage.length() > 0) {
            return eventMessage.toString();
        }
        // all done
        return ""; // no message to return; will simply reply as success
    }
public static String replaceShoppingListItem(HttpServletRequest request, HttpServletResponse response) {
String quantityStr = request.getParameter("quantity");
// just call the updateShoppingListItem service
LocalDispatcher dispatcher = (LocalDispatcher) request.getAttribute("dispatcher");
GenericValue userLogin = (GenericValue) request.getSession().getAttribute("userLogin");
Locale locale = UtilHttp.getLocale(request);
BigDecimal quantity = null;
try {
quantity = new BigDecimal(quantityStr);
} catch (NumberFormatException e) {
Debug.logError(e, "do nothing, just won't pass to service if it is null", MODULE);
}
Map<String, Object> serviceInMap = new HashMap<>();
serviceInMap.put("shoppingListId", request.getParameter("shoppingListId"));
serviceInMap.put("shoppingListItemSeqId", request.getParameter("shoppingListItemSeqId"));
serviceInMap.put("productId", request.getParameter("add_product_id"));
serviceInMap.put("userLogin", userLogin);
if (quantity != null) serviceInMap.put("quantity", quantity);
Map<String, Object> result = null;
try {
result = dispatcher.runSync("updateShoppingListItem", serviceInMap);
if (ServiceUtil.isError(result)) {
String errorMessage = ServiceUtil.getErrorMessage(result);
request.setAttribute("_ERROR_MESSAGE_", errorMessage);
Debug.logError(errorMessage, MODULE);
return "error";
}
} catch (GenericServiceException e) {
String errMsg = UtilProperties.getMessage(RES_ERROR, "shoppingListEvents.error_calling_update", locale) + ": " + e.toString();
request.setAttribute("_ERROR_MESSAGE_", errMsg);
String errorMsg = "Error calling the updateShoppingListItem in handleShoppingListItemVariant: " + e.toString();
Debug.logError(e, errorMsg, MODULE);
return "error";
}
return "success";
}
    /**
     * Finds or creates a specialized (auto-save) shopping list used to record shopping bag contents between user visits.
     * @param delegator the delegator
     * @param dispatcher the service dispatcher; when null and no list exists, none is created
     * @param partyId the owning party id; when null it is taken from userLogin
     * @param userLogin the current user login (also supplies service authorization)
     * @param productStoreId the product store the list is scoped to
     * @return the auto-save list id, or null when no list exists and one could not be created
     */
    public static String getAutoSaveListId(Delegator delegator, LocalDispatcher dispatcher, String partyId, GenericValue userLogin,
            String productStoreId) throws GenericEntityException, GenericServiceException {
        if (partyId == null && userLogin != null) {
            partyId = userLogin.getString("partyId");
        }
        String autoSaveListId = null;
        GenericValue list = null;
        // TODO: add sorting, just in case there are multiple...
        if (partyId != null) {
            Map<String, Object> findMap = UtilMisc.<String, Object>toMap("partyId", partyId, "productStoreId", productStoreId, "shoppingListTypeId",
                    "SLT_SPEC_PURP", "listName", PERSISTANT_LIST_NAME);
            List<GenericValue> existingLists = EntityQuery.use(delegator).from("ShoppingList").where(findMap).queryList();
            Debug.logInfo("Finding existing auto-save shopping list with: \nfindMap: " + findMap + "\nlists: " + existingLists, MODULE);
            if (UtilValidate.isNotEmpty(existingLists)) {
                // multiple matches possible (see TODO above); the first returned row wins
                list = EntityUtil.getFirst(existingLists);
                autoSaveListId = list.getString("shoppingListId");
            }
        }
        // no existing list: create one via the createShoppingList service (90s timeout, new transaction)
        if (list == null && dispatcher != null) {
            Map<String, Object> listFields = UtilMisc.<String, Object>toMap("userLogin", userLogin, "productStoreId", productStoreId,
                    "shoppingListTypeId", "SLT_SPEC_PURP", "listName", PERSISTANT_LIST_NAME);
            Map<String, Object> newListResult = dispatcher.runSync("createShoppingList", listFields, 90, true);
            if (ServiceUtil.isError(newListResult)) {
                String errorMessage = ServiceUtil.getErrorMessage(newListResult);
                Debug.logError(errorMessage, MODULE);
                // creation failed: caller receives null and must cope with a missing list
                return null;
            }
            if (newListResult != null) {
                autoSaveListId = (String) newListResult.get("shoppingListId");
            }
        }
        return autoSaveListId;
    }
/**
* Fills the specialized shopping list with the current shopping cart if one exists (if not leaves it alone)
*/
public static void fillAutoSaveList(ShoppingCart cart, LocalDispatcher dispatcher) throws GeneralException {
if (cart != null && dispatcher != null) {
GenericValue userLogin = ShoppingListEvents.getCartUserLogin(cart);
Delegator delegator = cart.getDelegator();
String autoSaveListId = cart.getAutoSaveListId();
if (autoSaveListId == null) {
autoSaveListId = getAutoSaveListId(delegator, dispatcher, null, userLogin, cart.getProductStoreId());
cart.setAutoSaveListId(autoSaveListId);
}
GenericValue shoppingList = EntityQuery.use(delegator).from("ShoppingList").where("shoppingListId", autoSaveListId).queryOne();
Integer currentListSize = 0;
if (UtilValidate.isNotEmpty(shoppingList)) {
List<GenericValue> shoppingListItems = shoppingList.getRelated("ShoppingListItem", null, null, false);
if (UtilValidate.isNotEmpty(shoppingListItems)) {
currentListSize = shoppingListItems.size();
}
}
try {
String[] itemsArray = makeCartItemsArray(cart);
if (itemsArray.length != 0) {
addBulkFromCart(delegator, dispatcher, cart, userLogin, autoSaveListId, null, itemsArray, false, false);
} else if (currentListSize != 0) {
clearListInfo(delegator, autoSaveListId);
}
} catch (IllegalArgumentException e) {
throw new GeneralException(e.getMessage(), e);
}
}
}
/**
* Saves the shopping cart to the specialized (auto-save) shopping list
*/
public static String saveCartToAutoSaveList(HttpServletRequest request, HttpServletResponse response) {
LocalDispatcher dispatcher = (LocalDispatcher) request.getAttribute("dispatcher");
ShoppingCart cart = ShoppingCartEvents.getCartObject(request);
try {
fillAutoSaveList(cart, dispatcher);
} catch (GeneralException e) {
Debug.logError(e, "Error saving the cart to the auto-save list: " + e.toString(), MODULE);
}
return "success";
}
    /**
     * Restores the specialized (auto-save) shopping list back into the shopping cart.
     * Runs only when the product store has auto-save enabled and a (possibly anonymous) login
     * exists; the load is skipped when the cart already reflects the list's contents.
     */
    public static String restoreAutoSaveList(HttpServletRequest request, HttpServletResponse response) {
        Delegator delegator = (Delegator) request.getAttribute("delegator");
        LocalDispatcher dispatcher = (LocalDispatcher) request.getAttribute("dispatcher");
        GenericValue productStore = ProductStoreWorker.getProductStore(request);
        HttpSession session = request.getSession();
        ShoppingCart cart = ShoppingCartEvents.getCartObject(request);
        // locate the user's identity
        GenericValue userLogin = (GenericValue) session.getAttribute("userLogin");
        if (userLogin == null) {
            userLogin = (GenericValue) session.getAttribute("autoUserLogin");
        }
        if (!ProductStoreWorker.autoSaveCart(productStore) || userLogin == null) {
            // if auto-save is disabled or there is still no userLogin just return here
            return "success";
        }
        // safety check for missing required parameter.
        if (cart.getWebSiteId() == null) {
            cart.setWebSiteId(WebSiteWorker.getWebSiteId(request));
        }
        // find the list ID
        String autoSaveListId = cart.getAutoSaveListId();
        if (autoSaveListId == null) {
            try {
                autoSaveListId = getAutoSaveListId(delegator, dispatcher, null, userLogin, cart.getProductStoreId());
            } catch (GeneralException e) {
                Debug.logError(e, MODULE);
            }
            cart.setAutoSaveListId(autoSaveListId);
        } else {
            // cart already carries a list id: verify it is still this user's current auto-save list
            String existingAutoSaveListId = null;
            try {
                existingAutoSaveListId = getAutoSaveListId(delegator, dispatcher, null, userLogin, cart.getProductStoreId());
            } catch (GeneralException e) {
                Debug.logError(e, MODULE);
            }
            if (existingAutoSaveListId != null) {
                if (!existingAutoSaveListId.equals(autoSaveListId)) {
                    // Replace with existing shopping list
                    cart.setAutoSaveListId(existingAutoSaveListId);
                    autoSaveListId = existingAutoSaveListId;
                    cart.setLastListRestore(null);
                } else {
                    // CASE: User first login and logout and then re-login again. This condition does not require a restore at all
                    // because at this point items in the cart and the items in the shopping list are same so just return.
                    return "success";
                }
            }
        }
        // check to see if we are okay to load this list
        java.sql.Timestamp lastLoad = cart.getLastListRestore();
        boolean okayToLoad = autoSaveListId != null && lastLoad == null;
        if (!okayToLoad && lastLoad != null) {
            // a restore already happened this session; only reload if the list changed since then
            GenericValue shoppingList = null;
            try {
                shoppingList = EntityQuery.use(delegator).from("ShoppingList").where("shoppingListId", autoSaveListId).queryOne();
            } catch (GenericEntityException e) {
                Debug.logError(e, MODULE);
            }
            if (shoppingList != null) {
                java.sql.Timestamp lastModified = shoppingList.getTimestamp("lastAdminModified");
                if (lastModified != null) {
                    if (lastModified.after(lastLoad)) {
                        okayToLoad = true;
                    }
                    if (cart.size() == 0 && lastModified.after(cart.getCartCreatedTime())) {
                        okayToLoad = true;
                    }
                }
            }
        }
        // load (restore) the list of we have determined it is okay to load
        if (okayToLoad) {
            String prodCatalogId = CatalogWorker.getCurrentCatalogId(request);
            try {
                addListToCart(delegator, dispatcher, cart, prodCatalogId, autoSaveListId, false, false, true);
                cart.setLastListRestore(UtilDateTime.nowTimestamp());
            } catch (IllegalArgumentException e) {
                Debug.logError(e, MODULE);
            }
        }
        return "success";
    }
/**
* Remove all items from the given list.
*/
public static int clearListInfo(Delegator delegator, String shoppingListId) throws GenericEntityException {
// remove the survey responses first
delegator.removeByAnd("ShoppingListItemSurvey", UtilMisc.toMap("shoppingListId", shoppingListId));
delegator.removeByAnd("ShoppingListItemAttribute", UtilMisc.toMap("shoppingListId", shoppingListId));
// next remove the items
return delegator.removeByAnd("ShoppingListItem", UtilMisc.toMap("shoppingListId", shoppingListId));
}
/**
* Creates records for survey responses on survey items
*/
public static int makeListItemSurveyResp(Delegator delegator, GenericValue item, List<String> surveyResps) throws GenericEntityException {
if (UtilValidate.isNotEmpty(surveyResps)) {
int count = 0;
for (String responseId : surveyResps) {
GenericValue listResp = delegator.makeValue("ShoppingListItemSurvey");
listResp.set("shoppingListId", item.getString("shoppingListId"));
listResp.set("shoppingListItemSeqId", item.getString("shoppingListItemSeqId"));
listResp.set("surveyResponseId", responseId);
delegator.create(listResp);
count++;
}
return count;
}
return -1;
}
/**
* Returns Map keyed on item sequence ID containing a map of item attributes
*/
public static Map<String, Map<String, String>> getItemAttributeInfos(List<GenericValue> items) {
Map<String, Map<String, String>> attributeInfos = new HashMap<>();
if (UtilValidate.isNotEmpty(items)) {
for (GenericValue item : items) {
String listId = item.getString("shoppingListId");
String itemId = item.getString("shoppingListItemSeqId");
String itemKey = listId + "." + itemId;
try {
List<GenericValue> itemAttributes = item.getRelated("ShoppingListItemAttribute", null, null, true);
for (GenericValue attribute : itemAttributes) {
Map<String, String> attribMap = attributeInfos.get(itemKey);
if (attribMap == null) {
attribMap = new HashMap<>();
attributeInfos.put(itemKey, attribMap);
}
attribMap.put(attribute.getString("attrName"), attribute.getString("attrValue"));
}
} catch (GenericEntityException e) {
Debug.logWarning(e, "Error loading related ShoppingListItemAttributes for shoppingListItem "
+ item);
}
}
}
return attributeInfos;
}
/**
* Returns Map keyed on item sequence ID containing a list of survey response IDs
*/
public static Map<String, List<String>> getItemSurveyInfos(List<GenericValue> items) {
Map<String, List<String>> surveyInfos = new HashMap<>();
if (UtilValidate.isNotEmpty(items)) {
for (GenericValue item : items) {
String listId = item.getString("shoppingListId");
String itemId = item.getString("shoppingListItemSeqId");
surveyInfos.put(listId + "." + itemId, getItemSurveyInfo(item));
}
}
return surveyInfos;
}
/**
* Returns a list of survey response IDs for a shopping list item
*/
public static List<String> getItemSurveyInfo(GenericValue item) {
List<String> responseIds = new LinkedList<>();
List<GenericValue> surveyResp = null;
try {
surveyResp = item.getRelated("ShoppingListItemSurvey", null, null, false);
} catch (GenericEntityException e) {
Debug.logError(e, MODULE);
}
if (UtilValidate.isNotEmpty(surveyResp)) {
for (GenericValue resp : surveyResp) {
responseIds.add(resp.getString("surveyResponseId"));
}
}
return responseIds;
}
private static GenericValue getCartUserLogin(ShoppingCart cart) {
GenericValue ul = cart.getUserLogin();
if (ul == null) {
ul = cart.getAutoUserLogin();
}
return ul;
}
private static String[] makeCartItemsArray(ShoppingCart cart) {
int len = cart.size();
String[] arr = new String[len];
for (int i = 0; i < len; i++) {
arr[i] = Integer.toString(i);
}
return arr;
}
    /**
     * Create the guest cookie that remembers the guest's auto-save shopping list between
     * visits, creating the list itself when auto-save is enabled and none exists yet.
     * NOTE(review): the cookie name embeds the server's OS account ("user.name" system
     * property), not anything guest-specific — presumably meant to namespace per
     * deployment; confirm this is intended.
     */
    public static String createGuestShoppingListCookies(HttpServletRequest request, HttpServletResponse response) {
        Delegator delegator = (Delegator) request.getAttribute("delegator");
        LocalDispatcher dispatcher = (LocalDispatcher) request.getAttribute("dispatcher");
        HttpSession session = request.getSession(true);
        ShoppingCart cart = (ShoppingCart) session.getAttribute("shoppingCart");
        GenericValue userLogin = (GenericValue) session.getAttribute("userLogin");
        Properties systemProps = System.getProperties();
        String guestShoppingUserName = "GuestShoppingListId_" + systemProps.getProperty("user.name").replace(" ", "_");
        String productStoreId = ProductStoreWorker.getProductStoreId(request);
        // thirty days, in seconds
        int cookieAge = (60 * 60 * 24 * 30);
        String autoSaveListId = null;
        Cookie[] cookies = request.getCookies();
        // check userLogin
        if (userLogin != null) {
            String partyId = userLogin.getString("partyId");
            if (UtilValidate.isEmpty(partyId)) {
                return "success";
            }
        }
        // find shopping list ID
        if (cookies != null) {
            for (Cookie cookie : cookies) {
                if (cookie.getName().equals(guestShoppingUserName)) {
                    autoSaveListId = cookie.getValue();
                    break;
                }
            }
        }
        // clear the auto-save info
        if (userLogin != null && ProductStoreWorker.autoSaveCart(delegator, productStoreId)) {
            if (UtilValidate.isEmpty(autoSaveListId)) {
                try {
                    Map<String, Object> listFields = UtilMisc.<String, Object>toMap("userLogin", userLogin, "productStoreId", productStoreId,
                            "shoppingListTypeId", "SLT_SPEC_PURP", "listName", PERSISTANT_LIST_NAME);
                    Map<String, Object> newListResult = dispatcher.runSync("createShoppingList", listFields, 90, true);
                    if (ServiceUtil.isError(newListResult)) {
                        String errorMessage = ServiceUtil.getErrorMessage(newListResult);
                        Debug.logError(errorMessage, MODULE);
                        // NOTE(review): returning null from an OFBiz request event is unusual;
                        // other handlers in this class return "error"/"success" — confirm intended.
                        return null;
                    }
                    if (newListResult != null) {
                        autoSaveListId = (String) newListResult.get("shoppingListId");
                    }
                } catch (GeneralException e) {
                    Debug.logError(e, MODULE);
                }
                // NOTE(review): if list creation failed above, autoSaveListId may still be null
                // here and the cookie is written with a null value — verify this is acceptable.
                Cookie guestShoppingListCookie = new Cookie(guestShoppingUserName, autoSaveListId);
                guestShoppingListCookie.setMaxAge(cookieAge);
                guestShoppingListCookie.setPath("/");
                guestShoppingListCookie.setSecure(true);
                guestShoppingListCookie.setHttpOnly(true);
                response.addCookie(guestShoppingListCookie);
            }
        }
        if (UtilValidate.isNotEmpty(autoSaveListId)) {
            if (UtilValidate.isNotEmpty(cart)) {
                cart.setAutoSaveListId(autoSaveListId);
            } else {
                cart = ShoppingCartEvents.getCartObject(request);
                cart.setAutoSaveListId(autoSaveListId);
            }
        }
        return "success";
    }
/**
* Clear the guest cookies for a shopping list
*/
public static String clearGuestShoppingListCookies(HttpServletRequest request, HttpServletResponse response) {
Properties systemProps = System.getProperties();
String guestShoppingUserName = "GuestShoppingListId_" + systemProps.getProperty("user.name").replace(" ", "_");
Cookie guestShoppingListCookie = new Cookie(guestShoppingUserName, null);
guestShoppingListCookie.setMaxAge(0);
guestShoppingListCookie.setPath("/");
response.addCookie(guestShoppingListCookie);
return "success";
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.